diff --git a/.coveragerc b/.coveragerc index b577fc29..561a370b 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,2 +1,6 @@ [run] -omit = zvt/recorders/* \ No newline at end of file +omit = + src/zvt/recorders/* + src/zvt/autocode/* + src/zvt/samples/* + *__init__* diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml new file mode 100644 index 00000000..951db2fb --- /dev/null +++ b/.github/workflows/build.yaml @@ -0,0 +1,41 @@ +name: build + +on: [push] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.12] + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Cache pip + uses: actions/cache@v2 + with: + # This path is specific to Ubuntu + path: ~/.cache/pip + # Look to see if there is a cache hit for the corresponding requirements file + key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + ${{ runner.os }}- + - name: Install dependencies + run: | + python -m pip install --upgrade pip + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Test with pytest + run: | + pip install pytest + pip install pytest-cov + pytest ./tests --cov-config=.coveragerc --cov-report=xml --cov=./src/zvt --ignore=tests/recorders/ --ignore=tests/domain/ + - name: Codecov + uses: codecov/codecov-action@v2 + with: + verbose: true \ No newline at end of file diff --git a/.github/workflows/package.yaml b/.github/workflows/package.yaml new file mode 100644 index 00000000..1dd5933e --- /dev/null +++ b/.github/workflows/package.yaml @@ -0,0 +1,28 @@ +name: package + +on: + release: + types: [published] + +jobs: + deploy: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.12' + - name: Install dependencies + run: | + python -m 
pip install --upgrade pip + pip install build + - name: Build package + run: python -m build + - name: Publish package + uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 + with: + user: __token__ + password: ${{ secrets.PYPI }} \ No newline at end of file diff --git a/.gitignore b/.gitignore index dded266f..431c2c47 100644 --- a/.gitignore +++ b/.gitignore @@ -65,6 +65,7 @@ instance/ # Sphinx documentation docs/_build/ +docs/source/api/_autosummary/ # PyBuilder target/ @@ -105,4 +106,6 @@ node_modules/ *running.ipynb -.DS_Store \ No newline at end of file +.DS_Store + +a.json \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..078b5580 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,20 @@ +ci: + autoupdate_schedule: monthly +repos: + - repo: https://github.com/psf/black + rev: 24.8.0 + hooks: + - id: black +# - repo: https://github.com/PyCQA/flake8 +# rev: 4.0.1 +# hooks: +# - id: flake8 +# additional_dependencies: +# - flake8-bugbear +# - flake8-implicit-str-concat +# - repo: https://github.com/pre-commit/pre-commit-hooks +# rev: v4.0.1 +# hooks: +# - id: fix-byte-order-marker +# - id: trailing-whitespace +# - id: end-of-file-fixer diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..1be9fbf0 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,16 @@ +version: 2 +build: + os: "ubuntu-20.04" + tools: + python: "3.12" +python: + install: + - requirements: requirements/docs.txt + - method: pip + path: . 
+sphinx: + builder: dirhtml + fail_on_warning: false + configuration: docs/source/conf.py +#formats: +# - pdf diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 1a7c0dda..00000000 --- a/.travis.yml +++ /dev/null @@ -1,24 +0,0 @@ -language: python -cache: pip -python: - - '3.6' -install: - - git fetch --tags --depth=500 - - pip install 'pytest>=3.6' --force-reinstall - - pip install pytest-cov codecov - - pip install -r ./requirements.txt -script: - - pytest ./tests --cov-config=.coveragerc --cov-report term --cov=./zvt --ignore=tests/recorders/ --ignore=tests/domain/ -after_success: - - codecov -deploy: - provider: pypi - user: foolcage - password: - secure: MvVCnUZUuTYxzs8R8kWTocOqZt2MGUX/W1hdKMxKJ9G9hFOYhzjEnbsF1Sc9yHB/0S1OY49068ScbN6iCuyKK2LZy/x3o7cKJgPnQt2LEjy6Yuu9MLxT6v8hJ0MTT3YCn0N8bNv4tOz7KkxxbZ8O/b5MgIKfdjBhVHEj92hhykYzyzlmG8mF+nxU/j0IGCAdxN9+IDioMIvCgnFqQhvkDwva4YbG6Uy+8YMVHFT3I+tSZRSmYxl/IwHJS+5tinI4TxX/ewrI5EznOe0HZvhF+eez+tGenS3pKF4hqF6t4RmKQX2kkdMuPFAvuveoMnPGiaSdoEMni1JPFnZL+3R4GVJPzk4F10v6AZPd10CARXqEwP23JCKAe0WvnbSBkV4iKpkvgxqPA59UwNQ90Jn4pDcTSao1WfRliAnBWCVj7S4x6xjEoNKPvOhXP3RPIYhgEFu4Ma4Cpihkof6VVtlg8VAJKH7j1vWmms3ShdddKXMeF2365sn5Owe671okYmMYMas/v47Y2Cz/0hwpVLuklNR5OYayXMXfUMIG2pH1pEFvg7y8v7ivy1EyCKGva+M/qEnoR1VGWNGSkypJHxG+w8dbtYwf3EYTA9fk/di3ygzepV7RrKJSDf8R+NwFrSfXeoEP07LonbtGN9iIz7Lp5PoB6rv99NHihlB47n1+PtM= - on: - tags: true -notifications: - email: - recipients: - - 5533061@qq.com \ No newline at end of file diff --git a/LICENSE b/LICENSE index 18774f7f..1e1a9056 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2019 zvtvz +Copyright (c) 2023 zvtvz Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/MANIFEST.in b/MANIFEST.in index f1f0b51c..7e45ce5f 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,4 @@ include *.md include *.txt include LICENSE -recursive-include zvt/* * \ No 
newline at end of file +recursive-include src/zvt/* * \ No newline at end of file diff --git a/README-cn.md b/README-cn.md new file mode 100644 index 00000000..88a3e67e --- /dev/null +++ b/README-cn.md @@ -0,0 +1,719 @@ +[![github](https://img.shields.io/github/stars/zvtvz/zvt.svg)](https://github.com/zvtvz/zvt) +[![image](https://img.shields.io/pypi/v/zvt.svg)](https://pypi.org/project/zvt/) +[![image](https://img.shields.io/pypi/l/zvt.svg)](https://pypi.org/project/zvt/) +[![image](https://img.shields.io/pypi/pyversions/zvt.svg)](https://pypi.org/project/zvt/) +[![build](https://github.com/zvtvz/zvt/actions/workflows/build.yaml/badge.svg)](https://github.com/zvtvz/zvt/actions/workflows/build.yaml) +[![package](https://github.com/zvtvz/zvt/actions/workflows/package.yaml/badge.svg)](https://github.com/zvtvz/zvt/actions/workflows/package.yaml) +[![Documentation Status](https://readthedocs.org/projects/zvt/badge/?version=latest)](https://zvt.readthedocs.io/en/latest/?badge=latest) +[![codecov.io](https://codecov.io/github/zvtvz/zvt/coverage.svg?branch=master)](https://codecov.io/github/zvtvz/zvt) +[![Downloads](https://pepy.tech/badge/zvt/month)](https://pepy.tech/project/zvt) + +**缘起** + +[炒股的三大原理](https://mp.weixin.qq.com/s/FoFR63wFSQIE_AyFubkZ6Q) + +**声明** + +本项目目前不保证任何向后兼容性,请谨慎升级。 +随着作者思想的变化,一些以前觉得重要的东西可能也变得不重要,从而可能不会进行维护。 +而一些新的东西的加入对你是否有用,需要自己去评估。 + + +**Read this in other languages: [English](README.md).** + +**详细文档:[https://zvt.readthedocs.io/en/latest/](https://zvt.readthedocs.io/en/latest/)** + +## 市场模型 +ZVT 将市场抽象为如下的模型: + +

+ +* TradableEntity (交易标的) +* ActorEntity (市场参与者) +* EntityEvent (交易标的 和 市场参与者 发生的事件) + +## 快速开始 + +### 安装 +``` +python3 -m pip install -U zvt +``` + +### 使用展示 + +#### 主界面 + +#### Dash & Plotly UI +> 适用于回测和研究,不太适用于实时行情和用户交互 + +安装完成后,在命令行下输入 zvt +```shell +zvt +``` +打开 [http://127.0.0.1:8050/](http://127.0.0.1:8050/) + +> 这里展示的例子依赖后面的下载历史数据,数据更新请参考后面文档 + +

+

+ +> 系统的核心概念是可视化的,界面的名称与其一一对应,因此也是统一可扩展的。 + +> 你可以在你喜欢的ide里编写和运行策略,然后运行界面查看其相关的标的,因子,信号和净值展示。 + +#### 前后端分离的UI +> 更灵活和可扩展,更适合于处理实时行情和用户交互,结合ZVT的动态tag系统,提供了一种量化结合主观的交易方式 + +- 初始化tag系统 + +运行以下脚本: + +https://github.com/zvtvz/zvt/blob/master/src/zvt/tasks/init_tag_system.py +https://github.com/zvtvz/zvt/blob/master/src/zvt/tasks/stock_pool_runner.py +https://github.com/zvtvz/zvt/blob/master/src/zvt/tasks/qmt_data_runner.py +https://github.com/zvtvz/zvt/blob/master/src/zvt/tasks/qmt_tick_runner.py + +- 安装 uvicorn +```shell +pip install uvicorn +``` +- 运行 zvt server + +安装完成后,在命令行下输入 zvt_server +```shell +zvt_server +``` +或者从代码运行: +https://github.com/zvtvz/zvt/blob/master/src/zvt/zvt_server.py + +- api 文档 + +open [http://127.0.0.1:8090/docs](http://127.0.0.1:8090/docs) + +- 部署前端 + +前端代码: https://github.com/zvtvz/zvt_ui + +修改前端环境文件: +https://github.com/zvtvz/zvt_ui/blob/main/.env + +设置 {your server IP}, 即zvt_server服务的地址 + +```text +NEXT_PUBLIC_SERVER = {your server IP} +``` + +然后参考前端的readme启动前端服务 + +打开 [http://127.0.0.1:3000/trade](http://127.0.0.1:3000/trade) + +

+ +#### 见证奇迹的时刻 +``` +>>> from zvt.domain import Stock, Stock1dHfqKdata +>>> from zvt.ml import MaStockMLMachine +>>> Stock.record_data(provider="em") +>>> entity_ids = ["stock_sz_000001", "stock_sz_000338", "stock_sh_601318"] +>>> Stock1dHfqKdata.record_data(provider="em", entity_ids=entity_ids, sleeping_time=1) +>>> machine = MaStockMLMachine(entity_ids=["stock_sz_000001"], data_provider="em") +>>> machine.train() +>>> machine.predict() +>>> machine.draw_result(entity_id="stock_sz_000001") +``` +

+ +> 以上几行代码实现了:数据的抓取,持久化,增量更新,机器学习,预测,展示结果。 +> 熟悉系统的核心概念后,可以应用到市场中的任何标的。 + +### 核心概念 +``` +>>> from zvt.domain import * +``` + +### TradableEntity (交易标的) + +#### A股交易标的 +``` +>>> Stock.record_data() +>>> df = Stock.query_data(index='code') +>>> print(df) + + id entity_id timestamp entity_type exchange code name list_date end_date +code +000001 stock_sz_000001 stock_sz_000001 1991-04-03 stock sz 000001 平安银行 1991-04-03 None +000002 stock_sz_000002 stock_sz_000002 1991-01-29 stock sz 000002 万 科A 1991-01-29 None +000004 stock_sz_000004 stock_sz_000004 1990-12-01 stock sz 000004 国华网安 1990-12-01 None +000005 stock_sz_000005 stock_sz_000005 1990-12-10 stock sz 000005 世纪星源 1990-12-10 None +000006 stock_sz_000006 stock_sz_000006 1992-04-27 stock sz 000006 深振业A 1992-04-27 None +... ... ... ... ... ... ... ... ... ... +605507 stock_sh_605507 stock_sh_605507 2021-08-02 stock sh 605507 国邦医药 2021-08-02 None +605577 stock_sh_605577 stock_sh_605577 2021-08-24 stock sh 605577 龙版传媒 2021-08-24 None +605580 stock_sh_605580 stock_sh_605580 2021-08-19 stock sh 605580 恒盛能源 2021-08-19 None +605588 stock_sh_605588 stock_sh_605588 2021-08-12 stock sh 605588 冠石科技 2021-08-12 None +605589 stock_sh_605589 stock_sh_605589 2021-08-10 stock sh 605589 圣泉集团 2021-08-10 None + +[4136 rows x 9 columns] +``` + +#### 美股交易标的 +``` +>>> Stockus.record_data() +>>> df = Stockus.query_data(index='code') +>>> print(df) + + id entity_id timestamp entity_type exchange code name list_date end_date +code +A stockus_nyse_A stockus_nyse_A NaT stockus nyse A 安捷伦 None None +AA stockus_nyse_AA stockus_nyse_AA NaT stockus nyse AA 美国铝业 None None +AAC stockus_nyse_AAC stockus_nyse_AAC NaT stockus nyse AAC Ares Acquisition Corp-A None None +AACG stockus_nasdaq_AACG stockus_nasdaq_AACG NaT stockus nasdaq AACG ATA Creativity Global ADR None None +AACG stockus_nyse_AACG stockus_nyse_AACG NaT stockus nyse AACG ATA Creativity Global ADR None None +... ... ... ... ... ... ... ... ... ... 
+ZWRK stockus_nasdaq_ZWRK stockus_nasdaq_ZWRK NaT stockus nasdaq ZWRK Z-Work Acquisition Corp-A None None +ZY stockus_nasdaq_ZY stockus_nasdaq_ZY NaT stockus nasdaq ZY Zymergen Inc None None +ZYME stockus_nyse_ZYME stockus_nyse_ZYME NaT stockus nyse ZYME Zymeworks Inc None None +ZYNE stockus_nasdaq_ZYNE stockus_nasdaq_ZYNE NaT stockus nasdaq ZYNE Zynerba Pharmaceuticals Inc None None +ZYXI stockus_nasdaq_ZYXI stockus_nasdaq_ZYXI NaT stockus nasdaq ZYXI Zynex Inc None None + +[5826 rows x 9 columns] + +>>> Stockus.query_data(code='AAPL') + id entity_id timestamp entity_type exchange code name list_date end_date +0 stockus_nasdaq_AAPL stockus_nasdaq_AAPL None stockus nasdaq AAPL 苹果 None None +``` + +#### 港股交易标的 +``` +>>> Stockhk.record_data() +>>> df = Stockhk.query_data(index='code') +>>> print(df) + + id entity_id timestamp entity_type exchange code name list_date end_date +code +00001 stockhk_hk_00001 stockhk_hk_00001 NaT stockhk hk 00001 长和 None None +00002 stockhk_hk_00002 stockhk_hk_00002 NaT stockhk hk 00002 中电控股 None None +00003 stockhk_hk_00003 stockhk_hk_00003 NaT stockhk hk 00003 香港中华煤气 None None +00004 stockhk_hk_00004 stockhk_hk_00004 NaT stockhk hk 00004 九龙仓集团 None None +00005 stockhk_hk_00005 stockhk_hk_00005 NaT stockhk hk 00005 汇丰控股 None None +... ... ... ... ... ... ... ... ... ... 
+09996 stockhk_hk_09996 stockhk_hk_09996 NaT stockhk hk 09996 沛嘉医疗-B None None +09997 stockhk_hk_09997 stockhk_hk_09997 NaT stockhk hk 09997 康基医疗 None None +09998 stockhk_hk_09998 stockhk_hk_09998 NaT stockhk hk 09998 光荣控股 None None +09999 stockhk_hk_09999 stockhk_hk_09999 NaT stockhk hk 09999 网易-S None None +80737 stockhk_hk_80737 stockhk_hk_80737 NaT stockhk hk 80737 湾区发展-R None None + +[2597 rows x 9 columns] + +>>> df[df.code=='00700'] + + id entity_id timestamp entity_type exchange code name list_date end_date +2112 stockhk_hk_00700 stockhk_hk_00700 None stockhk hk 00700 腾讯控股 None None + +``` + +#### 还有更多 +``` +>>> from zvt.contract import * +>>> zvt_context.tradable_schema_map + +{'stockus': zvt.domain.meta.stockus_meta.Stockus, + 'stockhk': zvt.domain.meta.stockhk_meta.Stockhk, + 'index': zvt.domain.meta.index_meta.Index, + 'etf': zvt.domain.meta.etf_meta.Etf, + 'stock': zvt.domain.meta.stock_meta.Stock, + 'block': zvt.domain.meta.block_meta.Block, + 'fund': zvt.domain.meta.fund_meta.Fund} +``` + +其中key为交易标的的类型,value为其schema,系统为schema提供了统一的 **记录(record_data)** 和 **查询(query_data)** 方法。 + +``` +>>> Index.record_data() +>>> df=Index.query_data(filters=[Index.category=='scope',Index.exchange=='sh']) +>>> print(df) + id entity_id timestamp entity_type exchange code name list_date end_date publisher category base_point +0 index_sh_000001 index_sh_000001 1990-12-19 index sh 000001 上证指数 1991-07-15 None csindex scope 100.00 +1 index_sh_000002 index_sh_000002 1990-12-19 index sh 000002 A股指数 1992-02-21 None csindex scope 100.00 +2 index_sh_000003 index_sh_000003 1992-02-21 index sh 000003 B股指数 1992-08-17 None csindex scope 100.00 +3 index_sh_000010 index_sh_000010 2002-06-28 index sh 000010 上证180 2002-07-01 None csindex scope 3299.06 +4 index_sh_000016 index_sh_000016 2003-12-31 index sh 000016 上证50 2004-01-02 None csindex scope 1000.00 +.. ... ... ... ... ... ... ... ... ... ... ... ... 
+25 index_sh_000020 index_sh_000020 2007-12-28 index sh 000020 中型综指 2008-05-12 None csindex scope 1000.00 +26 index_sh_000090 index_sh_000090 2009-12-31 index sh 000090 上证流通 2010-12-02 None csindex scope 1000.00 +27 index_sh_930903 index_sh_930903 2012-12-31 index sh 930903 中证A股 2016-10-18 None csindex scope 1000.00 +28 index_sh_000688 index_sh_000688 2019-12-31 index sh 000688 科创50 2020-07-23 None csindex scope 1000.00 +29 index_sh_931643 index_sh_931643 2019-12-31 index sh 931643 科创创业50 2021-06-01 None csindex scope 1000.00 + +[30 rows x 12 columns] + +``` + +### EntityEvent (交易标的 发生的事件) +有了交易标的,才有交易标的 发生的事。 + +#### 行情数据 +交易标的 **行情schema** 遵从如下的规则: +``` +{entity_shema}{level}{adjust_type}Kdata +``` +* entity_schema + +就是前面说的TradableEntity,比如Stock,Stockus等。 + +* level +``` +>>> for level in IntervalLevel: + print(level.value) +``` + +* adjust type +``` +>>> for adjust_type in AdjustType: + print(adjust_type.value) +``` +> 注意: 为了兼容历史数据,前复权是个例外,{adjust_type}不填 + +前复权 +``` +>>> Stock1dKdata.record_data(code='000338', provider='em') +>>> df = Stock1dKdata.query_data(code='000338', provider='em') +>>> print(df) + + id entity_id timestamp provider code name level open close high low volume turnover change_pct turnover_rate +0 stock_sz_000338_2007-04-30 stock_sz_000338 2007-04-30 None 000338 潍柴动力 1d 2.33 2.00 2.40 1.87 207375.0 1.365189e+09 3.2472 0.1182 +1 stock_sz_000338_2007-05-08 stock_sz_000338 2007-05-08 None 000338 潍柴动力 1d 2.11 1.94 2.20 1.87 86299.0 5.563198e+08 -0.0300 0.0492 +2 stock_sz_000338_2007-05-09 stock_sz_000338 2007-05-09 None 000338 潍柴动力 1d 1.90 1.81 1.94 1.66 93823.0 5.782065e+08 -0.0670 0.0535 +3 stock_sz_000338_2007-05-10 stock_sz_000338 2007-05-10 None 000338 潍柴动力 1d 1.78 1.85 1.98 1.75 47720.0 2.999226e+08 0.0221 0.0272 +4 stock_sz_000338_2007-05-11 stock_sz_000338 2007-05-11 None 000338 潍柴动力 1d 1.81 1.73 1.81 1.66 39273.0 2.373126e+08 -0.0649 0.0224 +... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... 
+3426 stock_sz_000338_2021-08-27 stock_sz_000338 2021-08-27 None 000338 潍柴动力 1d 19.39 20.30 20.30 19.25 1688497.0 3.370241e+09 0.0601 0.0398 +3427 stock_sz_000338_2021-08-30 stock_sz_000338 2021-08-30 None 000338 潍柴动力 1d 20.30 20.09 20.31 19.78 1187601.0 2.377957e+09 -0.0103 0.0280 +3428 stock_sz_000338_2021-08-31 stock_sz_000338 2021-08-31 None 000338 潍柴动力 1d 20.20 20.07 20.63 19.70 1143985.0 2.295195e+09 -0.0010 0.0270 +3429 stock_sz_000338_2021-09-01 stock_sz_000338 2021-09-01 None 000338 潍柴动力 1d 19.98 19.68 19.98 19.15 1218697.0 2.383841e+09 -0.0194 0.0287 +3430 stock_sz_000338_2021-09-02 stock_sz_000338 2021-09-02 None 000338 潍柴动力 1d 19.71 19.85 19.97 19.24 1023545.0 2.012006e+09 0.0086 0.0241 + +[3431 rows x 15 columns] + +>>> Stockus1dKdata.record_data(code='AAPL', provider='em') +>>> df = Stockus1dKdata.query_data(code='AAPL', provider='em') +>>> print(df) + + id entity_id timestamp provider code name level open close high low volume turnover change_pct turnover_rate +0 stockus_nasdaq_AAPL_1984-09-07 stockus_nasdaq_AAPL 1984-09-07 None AAPL 苹果 1d -5.59 -5.59 -5.58 -5.59 2981600.0 0.000000e+00 0.0000 0.0002 +1 stockus_nasdaq_AAPL_1984-09-10 stockus_nasdaq_AAPL 1984-09-10 None AAPL 苹果 1d -5.59 -5.59 -5.58 -5.59 2346400.0 0.000000e+00 0.0000 0.0001 +2 stockus_nasdaq_AAPL_1984-09-11 stockus_nasdaq_AAPL 1984-09-11 None AAPL 苹果 1d -5.58 -5.58 -5.58 -5.58 5444000.0 0.000000e+00 0.0018 0.0003 +3 stockus_nasdaq_AAPL_1984-09-12 stockus_nasdaq_AAPL 1984-09-12 None AAPL 苹果 1d -5.58 -5.59 -5.58 -5.59 4773600.0 0.000000e+00 -0.0018 0.0003 +4 stockus_nasdaq_AAPL_1984-09-13 stockus_nasdaq_AAPL 1984-09-13 None AAPL 苹果 1d -5.58 -5.58 -5.58 -5.58 7429600.0 0.000000e+00 0.0018 0.0004 +... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... 
+8765 stockus_nasdaq_AAPL_2021-08-27 stockus_nasdaq_AAPL 2021-08-27 None AAPL 苹果 1d 147.48 148.60 148.75 146.83 55802388.0 8.265452e+09 0.0072 0.0034 +8766 stockus_nasdaq_AAPL_2021-08-30 stockus_nasdaq_AAPL 2021-08-30 None AAPL 苹果 1d 149.00 153.12 153.49 148.61 90956723.0 1.383762e+10 0.0304 0.0055 +8767 stockus_nasdaq_AAPL_2021-08-31 stockus_nasdaq_AAPL 2021-08-31 None AAPL 苹果 1d 152.66 151.83 152.80 151.29 86453117.0 1.314255e+10 -0.0084 0.0052 +8768 stockus_nasdaq_AAPL_2021-09-01 stockus_nasdaq_AAPL 2021-09-01 None AAPL 苹果 1d 152.83 152.51 154.98 152.34 80313711.0 1.235321e+10 0.0045 0.0049 +8769 stockus_nasdaq_AAPL_2021-09-02 stockus_nasdaq_AAPL 2021-09-02 None AAPL 苹果 1d 153.87 153.65 154.72 152.40 71171317.0 1.093251e+10 0.0075 0.0043 + +[8770 rows x 15 columns] +``` +后复权 +``` +>>> Stock1dHfqKdata.record_data(code='000338', provider='em') +>>> df = Stock1dHfqKdata.query_data(code='000338', provider='em') +>>> print(df) + + id entity_id timestamp provider code name level open close high low volume turnover change_pct turnover_rate +0 stock_sz_000338_2007-04-30 stock_sz_000338 2007-04-30 None 000338 潍柴动力 1d 70.00 64.93 71.00 62.88 207375.0 1.365189e+09 2.1720 0.1182 +1 stock_sz_000338_2007-05-08 stock_sz_000338 2007-05-08 None 000338 潍柴动力 1d 66.60 64.00 68.00 62.88 86299.0 5.563198e+08 -0.0143 0.0492 +2 stock_sz_000338_2007-05-09 stock_sz_000338 2007-05-09 None 000338 潍柴动力 1d 63.32 62.00 63.88 59.60 93823.0 5.782065e+08 -0.0313 0.0535 +3 stock_sz_000338_2007-05-10 stock_sz_000338 2007-05-10 None 000338 潍柴动力 1d 61.50 62.49 64.48 61.01 47720.0 2.999226e+08 0.0079 0.0272 +4 stock_sz_000338_2007-05-11 stock_sz_000338 2007-05-11 None 000338 潍柴动力 1d 61.90 60.65 61.90 59.70 39273.0 2.373126e+08 -0.0294 0.0224 +... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... 
+3426 stock_sz_000338_2021-08-27 stock_sz_000338 2021-08-27 None 000338 潍柴动力 1d 331.97 345.95 345.95 329.82 1688497.0 3.370241e+09 0.0540 0.0398 +3427 stock_sz_000338_2021-08-30 stock_sz_000338 2021-08-30 None 000338 潍柴动力 1d 345.95 342.72 346.10 337.96 1187601.0 2.377957e+09 -0.0093 0.0280 +3428 stock_sz_000338_2021-08-31 stock_sz_000338 2021-08-31 None 000338 潍柴动力 1d 344.41 342.41 351.02 336.73 1143985.0 2.295195e+09 -0.0009 0.0270 +3429 stock_sz_000338_2021-09-01 stock_sz_000338 2021-09-01 None 000338 潍柴动力 1d 341.03 336.42 341.03 328.28 1218697.0 2.383841e+09 -0.0175 0.0287 +3430 stock_sz_000338_2021-09-02 stock_sz_000338 2021-09-02 None 000338 潍柴动力 1d 336.88 339.03 340.88 329.67 1023545.0 2.012006e+09 0.0078 0.0241 + +[3431 rows x 15 columns] +``` + +#### 财务因子 +``` +>>> FinanceFactor.record_data(code='000338') +>>> FinanceFactor.query_data(code='000338',columns=FinanceFactor.important_cols(),index='timestamp') + + basic_eps total_op_income net_profit op_income_growth_yoy net_profit_growth_yoy roe rota gross_profit_margin net_margin timestamp +timestamp +2002-12-31 NaN 1.962000e+07 2.471000e+06 NaN NaN NaN NaN 0.2068 0.1259 2002-12-31 +2003-12-31 1.27 3.574000e+09 2.739000e+08 181.2022 109.8778 0.7729 0.1783 0.2551 0.0766 2003-12-31 +2004-12-31 1.75 6.188000e+09 5.369000e+08 0.7313 0.9598 0.3245 0.1474 0.2489 0.0868 2004-12-31 +2005-12-31 0.93 5.283000e+09 3.065000e+08 -0.1463 -0.4291 0.1327 0.0603 0.2252 0.0583 2005-12-31 +2006-03-31 0.33 1.859000e+09 1.079000e+08 NaN NaN NaN NaN NaN 0.0598 2006-03-31 +... ... ... ... ... ... ... ... ... ... ... 
+2020-08-28 0.59 9.449000e+10 4.680000e+09 0.0400 -0.1148 0.0983 0.0229 0.1958 0.0603 2020-08-28 +2020-10-31 0.90 1.474000e+11 7.106000e+09 0.1632 0.0067 0.1502 0.0347 0.1949 0.0590 2020-10-31 +2021-03-31 1.16 1.975000e+11 9.207000e+09 0.1327 0.0112 0.1919 0.0444 0.1931 0.0571 2021-03-31 +2021-04-30 0.42 6.547000e+10 3.344000e+09 0.6788 0.6197 0.0622 0.0158 0.1916 0.0667 2021-04-30 +2021-08-31 0.80 1.264000e+11 6.432000e+09 0.3375 0.3742 0.1125 0.0287 0.1884 0.0653 2021-08-31 + +[66 rows x 10 columns] +``` + +#### 财务三张表 +``` +#资产负债表 +>>> BalanceSheet.record_data(code='000338') +#利润表 +>>> IncomeStatement.record_data(code='000338') +#现金流量表 +>>> CashFlowStatement.record_data(code='000338') +``` + +#### 还有更多 +``` +>>> zvt_context.schemas +[zvt.domain.dividend_financing.DividendFinancing, + zvt.domain.dividend_financing.DividendDetail, + zvt.domain.dividend_financing.SpoDetail...] +``` + +zvt_context.schemas为系统支持的schema,schema即表结构,即数据,其字段含义的查看方式如下: + +* help + +输入schema.按tab提示其包含的字段,或者.help() +``` +>>> FinanceFactor.help() +``` + +* 源码 + +[domain](https://github.com/zvtvz/zvt/tree/master/src/zvt/domain)里的文件为schema的定义,查看相应字段的注释即可。 + +通过以上的例子,你应该掌握了统一的记录数据的方法: + +> Schema.record_data(provider='your provider',codes='the codes') + +注意可选参数provider,其代表数据提供商,一个schema可以有多个provider,这是系统稳定的基石。 + +查看**已实现**的provider +``` +>>> Stock.provider_map_recorder +{'joinquant': zvt.recorders.joinquant.meta.jq_stock_meta_recorder.JqChinaStockRecorder, + 'exchange': zvt.recorders.exchange.exchange_stock_meta_recorder.ExchangeStockMetaRecorder, + 'em': zvt.recorders.em.meta.em_stock_meta_recorder.EMStockRecorder, + 'eastmoney': zvt.recorders.eastmoney.meta.eastmoney_stock_meta_recorder.EastmoneyChinaStockListRecorder} + +``` +你可以使用任意一个provider来获取数据,默认使用第一个。 + +再举个例子,股票板块数据获取: +``` +>>> Block.provider_map_recorder +{'eastmoney': zvt.recorders.eastmoney.meta.eastmoney_block_meta_recorder.EastmoneyChinaBlockRecorder, + 'sina': zvt.recorders.sina.meta.sina_block_recorder.SinaBlockRecorder} + +>>> 
Block.record_data(provider='sina') +Block registered recorders:{'eastmoney': , 'sina': } +2020-03-04 23:56:48,931 INFO MainThread finish record sina blocks:industry +2020-03-04 23:56:49,450 INFO MainThread finish record sina blocks:concept +``` + +再多了解一点record_data: +* 参数code[单个],codes[多个]代表需要抓取的股票代码 +* 不传入code,codes则是全市场抓取 +* 该方法会把数据存储到本地并只做增量更新 + +定时任务的方式更新可参考[定时更新](https://github.com/zvtvz/zvt/blob/master/examples/data_runner) + +#### 全市场选股 +查询数据使用的是query_data方法,把全市场的数据记录下来后,就可以在本地快速查询需要的数据了。 + +一个例子:2018年年报 roe>8% 营收增长>8% 的前20个股 +``` +>>> df=FinanceFactor.query_data(filters=[FinanceFactor.roe>0.08,FinanceFactor.report_period=='year',FinanceFactor.op_income_growth_yoy>0.08],start_timestamp='2019-01-01',order=FinanceFactor.roe.desc(),limit=20,columns=["code"]+FinanceFactor.important_cols(),index='code') + + code basic_eps total_op_income net_profit op_income_growth_yoy net_profit_growth_yoy roe rota gross_profit_margin net_margin timestamp +code +000048 000048 2.7350 4.919000e+09 1.101000e+09 0.4311 1.5168 0.7035 0.1988 0.5243 0.2355 2020-04-30 +000912 000912 0.3500 4.405000e+09 3.516000e+08 0.1796 1.2363 4.7847 0.0539 0.2175 0.0795 2019-03-20 +002207 002207 0.2200 3.021000e+08 5.189000e+07 0.1600 1.1526 1.1175 0.1182 0.1565 0.1718 2020-04-27 +002234 002234 5.3300 3.276000e+09 1.610000e+09 0.8023 3.2295 0.8361 0.5469 0.5968 0.4913 2020-04-21 +002458 002458 3.7900 3.584000e+09 2.176000e+09 1.4326 4.9973 0.8318 0.6754 0.6537 0.6080 2020-02-20 +... ... ... ... ... ... ... ... ... ... ... ... 
+600701 600701 -3.6858 7.830000e+08 -3.814000e+09 1.3579 -0.0325 1.9498 -0.7012 0.4173 -4.9293 2020-04-29 +600747 600747 -1.5600 3.467000e+08 -2.290000e+09 2.1489 -0.4633 3.1922 -1.5886 0.0378 -6.6093 2020-06-30 +600793 600793 1.6568 1.293000e+09 1.745000e+08 0.1164 0.8868 0.7490 0.0486 0.1622 0.1350 2019-04-30 +600870 600870 0.0087 3.096000e+07 4.554000e+06 0.7773 1.3702 0.7458 0.0724 0.2688 0.1675 2019-03-30 +688169 688169 15.6600 4.205000e+09 7.829000e+08 0.3781 1.5452 0.7172 0.4832 0.3612 0.1862 2020-04-28 + +[20 rows x 11 columns] +``` + +以上,你应该会回答如下的三个问题了: +* 有什么数据? +* 如何记录数据? +* 如何查询数据? + +更高级的用法以及扩展数据,可以参考详细文档里的数据部分。 + +### 写个策略 +有了 **交易标的** 和 **交易标的发生的事**,就可以写策略了。 + +所谓策略回测,无非就是,重复以下过程: +#### 在某时间点,找到符合条件的标的,对其进行买卖,看其表现。 + +系统支持两种模式: +* solo (随意的) + +在 某个时间 根据发生的事件 计算条件 并买卖 + +* formal (正式的) + +系统设计的二维索引多标的计算模型 + +#### 一个很随便的人(solo) +嗯,这个策略真的很随便,就像我们大部分时间做的那样。 +> 报表出来的时,我看一下报表,机构加仓超过5%我就买入,机构减仓超过50%我就卖出。 + +代码如下: +``` +# -*- coding: utf-8 -*- +import pandas as pd + +from zvt.api import get_recent_report_date +from zvt.contract import ActorType, AdjustType +from zvt.domain import StockActorSummary, Stock1dKdata +from zvt.trader import StockTrader +from zvt.utils import pd_is_not_null, is_same_date, to_pd_timestamp + + +class FollowIITrader(StockTrader): + finish_date = None + + def on_time(self, timestamp: pd.Timestamp): + recent_report_date = to_pd_timestamp(get_recent_report_date(timestamp)) + if self.finish_date and is_same_date(recent_report_date, self.finish_date): + return + filters = [StockActorSummary.actor_type == ActorType.raised_fund.value, + StockActorSummary.report_date == recent_report_date] + + if self.entity_ids: + filters = filters + [StockActorSummary.entity_id.in_(self.entity_ids)] + + df = StockActorSummary.query_data(filters=filters) + + if pd_is_not_null(df): + self.logger.info(f'{df}') + self.finish_date = recent_report_date + + long_df = df[df['change_ratio'] > 0.05] + short_df = df[df['change_ratio'] < -0.5] + try: + 
self.trade_the_targets(due_timestamp=timestamp, happen_timestamp=timestamp, + long_selected=set(long_df['entity_id'].to_list()), + short_selected=set(short_df['entity_id'].to_list())) + except Exception as e: + self.logger.error(e) + + +if __name__ == '__main__': + entity_id = 'stock_sh_600519' + Stock1dKdata.record_data(entity_id=entity_id, provider='em') + StockActorSummary.record_data(entity_id=entity_id, provider='em') + FollowIITrader(start_timestamp='2002-01-01', end_timestamp='2021-01-01', entity_ids=[entity_id], + provider='em', adjust_type=AdjustType.qfq, profit_threshold=None).run() +``` + +所以,写一个策略其实还是很简单的嘛。 +你可以发挥想象力,社保重仓买买买,外资重仓买买买,董事长跟小姨子跑了卖卖卖...... + +然后,刷新一下[http://127.0.0.1:8050/](http://127.0.0.1:8050/),看你运行策略的performance + +更多可参考[策略例子](https://github.com/zvtvz/zvt/tree/master/examples/trader) + +#### 严肃一点(formal) +简单的计算可以通过query_data来完成,这里说的是系统设计的二维索引多标的计算模型。 + +下面以技术因子为例对**计算流程**进行说明: +``` +In [7]: from zvt.factors import * +In [8]: factor = BullFactor(codes=['000338','601318'],start_timestamp='2019-01-01',end_timestamp='2019-06-10', transformer=MacdTransformer(count_live_dead=True)) +``` +### data_df +data_df为factor的原始数据,即通过query_data从数据库读取到的数据,为一个**二维索引**DataFrame +``` +In [11]: factor.data_df +Out[11]: + level high id entity_id open low timestamp close +entity_id timestamp +stock_sh_601318 2019-01-02 1d 54.91 stock_sh_601318_2019-01-02 stock_sh_601318 54.78 53.70 2019-01-02 53.94 + 2019-01-03 1d 55.06 stock_sh_601318_2019-01-03 stock_sh_601318 53.91 53.82 2019-01-03 54.42 + 2019-01-04 1d 55.71 stock_sh_601318_2019-01-04 stock_sh_601318 54.03 53.98 2019-01-04 55.31 + 2019-01-07 1d 55.88 stock_sh_601318_2019-01-07 stock_sh_601318 55.80 54.64 2019-01-07 55.03 + 2019-01-08 1d 54.83 stock_sh_601318_2019-01-08 stock_sh_601318 54.79 53.96 2019-01-08 54.54 +... ... ... ... ... ... ... ... ... 
+stock_sz_000338 2019-06-03 1d 11.04 stock_sz_000338_2019-06-03 stock_sz_000338 10.93 10.74 2019-06-03 10.81 + 2019-06-04 1d 10.85 stock_sz_000338_2019-06-04 stock_sz_000338 10.84 10.57 2019-06-04 10.73 + 2019-06-05 1d 10.92 stock_sz_000338_2019-06-05 stock_sz_000338 10.87 10.59 2019-06-05 10.59 + 2019-06-06 1d 10.71 stock_sz_000338_2019-06-06 stock_sz_000338 10.59 10.49 2019-06-06 10.65 + 2019-06-10 1d 11.05 stock_sz_000338_2019-06-10 stock_sz_000338 10.73 10.71 2019-06-10 11.02 + +[208 rows x 8 columns] +``` + +### factor_df +factor_df为transformer对data_df进行计算后得到的数据,设计因子即对[transformer](https://github.com/zvtvz/zvt/blob/master/src/zvt/contract/factor.py#L34)进行扩展,例子中用的是MacdTransformer()。 + +``` +In [12]: factor.factor_df +Out[12]: + level high id entity_id open low timestamp close diff dea macd +entity_id timestamp +stock_sh_601318 2019-01-02 1d 54.91 stock_sh_601318_2019-01-02 stock_sh_601318 54.78 53.70 2019-01-02 53.94 NaN NaN NaN + 2019-01-03 1d 55.06 stock_sh_601318_2019-01-03 stock_sh_601318 53.91 53.82 2019-01-03 54.42 NaN NaN NaN + 2019-01-04 1d 55.71 stock_sh_601318_2019-01-04 stock_sh_601318 54.03 53.98 2019-01-04 55.31 NaN NaN NaN + 2019-01-07 1d 55.88 stock_sh_601318_2019-01-07 stock_sh_601318 55.80 54.64 2019-01-07 55.03 NaN NaN NaN + 2019-01-08 1d 54.83 stock_sh_601318_2019-01-08 stock_sh_601318 54.79 53.96 2019-01-08 54.54 NaN NaN NaN +... ... ... ... ... ... ... ... ... ... ... ... 
+stock_sz_000338 2019-06-03 1d 11.04 stock_sz_000338_2019-06-03 stock_sz_000338 10.93 10.74 2019-06-03 10.81 -0.121336 -0.145444 0.048215 + 2019-06-04 1d 10.85 stock_sz_000338_2019-06-04 stock_sz_000338 10.84 10.57 2019-06-04 10.73 -0.133829 -0.143121 0.018583 + 2019-06-05 1d 10.92 stock_sz_000338_2019-06-05 stock_sz_000338 10.87 10.59 2019-06-05 10.59 -0.153260 -0.145149 -0.016223 + 2019-06-06 1d 10.71 stock_sz_000338_2019-06-06 stock_sz_000338 10.59 10.49 2019-06-06 10.65 -0.161951 -0.148509 -0.026884 + 2019-06-10 1d 11.05 stock_sz_000338_2019-06-10 stock_sz_000338 10.73 10.71 2019-06-10 11.02 -0.137399 -0.146287 0.017776 + +[208 rows x 11 columns] +``` + +### result_df +result_df为可用于选股器的**二维索引**DataFrame,通过对data_df或factor_df计算来实现。 +该例子在计算macd之后,利用factor_df,黄白线在0轴上为True,否则为False,[具体代码](https://github.com/zvtvz/zvt/blob/master/src/zvt/factors/technical_factor.py#L56) + +``` +In [14]: factor.result_df +Out[14]: + score +entity_id timestamp +stock_sh_601318 2019-01-02 False + 2019-01-03 False + 2019-01-04 False + 2019-01-07 False + 2019-01-08 False +... ... +stock_sz_000338 2019-06-03 False + 2019-06-04 False + 2019-06-05 False + 2019-06-06 False + 2019-06-10 False + +[208 rows x 1 columns] +``` + +result_df的格式如下: + + +

+ +filter_result 为 True 或 False, score_result 取值为 0 到 1。 + + +结合选股器和回测,整个流程如下: +

+ +## 环境设置(可选) +``` +>>> from zvt import * +>>> zvt_env +{'zvt_home': '/Users/foolcage/zvt-home', + 'data_path': '/Users/foolcage/zvt-home/data', + 'tmp_path': '/Users/foolcage/zvt-home/tmp', + 'ui_path': '/Users/foolcage/zvt-home/ui', + 'log_path': '/Users/foolcage/zvt-home/logs'} + +>>> zvt_config +``` + +* jq_username 聚宽数据用户名 +* jq_password 聚宽数据密码 +* smtp_host 邮件服务器host +* smtp_port 邮件服务器端口 +* email_username smtp邮箱账户 +* email_password smtp邮箱密码 +* wechat_app_id +* wechat_app_secrect + +``` +>>> init_config(current_config=zvt_config, jq_username='xxx', jq_password='yyy') +``` +> 通用的配置方式为: init_config(current_config=zvt_config, **kv) + +### 历史数据 + +ZVT支持数据增量更新,用户之间可以共享历史数据,这样可以节省很多时间。 + +#### 数据源 +> 新UI实时行情的计算基于QMT数据源,需要开通的同学可联系作者。 + +项目数据支持多provider,在数据schema一致性的基础上,可根据需要进行选择和扩展,目前支持新浪,东财,交易所等免费数据。 + +#### 数据的设计上是让provider来适配schema,而不是反过来,这样即使某provider不可用了,换一个即可,不会影响整个系统的使用。 + +但免费数据的缺点是显而易见的:不稳定,爬取清洗数据耗时耗力,维护代价巨大,且随时可能不可用。 +个人建议:如果只是学习研究,可以使用免费数据;如果是真正有意投身量化,还是选一家可靠的数据提供商。 + + +> 项目中大部分的免费数据目前都是比较稳定的,且做过严格测试,特别是东财的数据,可放心使用 + +> 添加其他数据提供商, 请参考[数据扩展教程](https://zvtvz.github.io/zvt/#/data_extending) + +## 开发 + +### clone代码 + +``` +git clone https://github.com/zvtvz/zvt.git +``` + +设置项目的virtual env(python>=3.6),安装依赖 +``` +pip3 install -r requirements.txt +pip3 install pytest +``` + +### 测试案例 +pycharm导入工程(推荐,你也可以使用其他ide),然后pytest跑测试案例 + +

+ +大部分功能使用都可以从tests里面参考 + +## 贡献 +期待能有更多的开发者参与到 zvt 的开发中来,我会保证尽快 Review PR 并且及时回复。但提交 PR 请确保 + +先看一下[1分钟代码规范](https://github.com/zvtvz/zvt/blob/master/code_of_conduct.md) + +1. 通过所有单元测试,如若是新功能,请为其新增单元测试 +2. 遵守开发规范 +3. 如若需要,请更新相对应的文档 + +也非常欢迎开发者能为 zvt 提供更多的示例,共同来完善文档。 + +## 请作者喝杯咖啡 + +如果你觉得项目对你有帮助,可以请作者喝杯咖啡 +Alipay      +Wechat + +## 联系方式 + +加微信进群:foolcage 添加暗号:zvt +Wechat + +------ +微信公众号: +Wechat + +知乎专栏: +https://zhuanlan.zhihu.com/automoney + +## Thanks +

jetbrains

diff --git a/README-en.md b/README-en.md deleted file mode 100644 index 92622de3..00000000 --- a/README-en.md +++ /dev/null @@ -1,155 +0,0 @@ -[![image](https://img.shields.io/pypi/v/zvt.svg)](https://pypi.org/project/zvt/) -[![image](https://img.shields.io/pypi/l/zvt.svg)](https://pypi.org/project/zvt/) -[![image](https://img.shields.io/pypi/pyversions/zvt.svg)](https://pypi.org/project/zvt/) -[![Build Status](https://api.travis-ci.org/zvtvz/zvt.svg?branch=master)](https://travis-ci.org/zvtvz/zvt) -[![codecov.io](https://codecov.io/github/zvtvz/zvt/coverage.svg?branch=master)](https://codecov.io/github/zvtvz/zvt) -[![Downloads](https://pepy.tech/badge/zvt/month)](https://pepy.tech/project/zvt) - -**Read this in other languages: [English](README-en.md).** - -ZVT is a quant trading platform written after rethinking trading based on [fooltrader](https://github.com/foolcage/fooltrader), -which includes scalable data recorder, api, factor calculation, stock picking, backtesting, and unified visualization layer, -focus on **low frequency**, **multi-level**, **multi-targets** full market analysis and trading framework. - -## 🔖 Useage examples - -### Sector fund flow analysis -

- -### Market history pe distribution -

- -### Multi-market pe comparison -

- -### Margin financing trend -

- -### Foreign capital flow (Shanghai / Shenzhen-Hong Kong Stock Connect) -

- -### Dividend comparison -

- -### Dividend financing comparison -

- -### Balance sheet analysis -

- -### Custom factor -

- - -## examples -[*code examples*](./zvt/trader/examples) -### Algorithmic trading signals and performance -

- -### Multi-targets transaction -

- -### real time digital currency trading -

- -### stock picker -

- -

- -The entire framework is highly scalable, and only needs to write very little code to expand each module. The extended tutorials for each module are to be improved, and waiting for your contribution. - - -## ✨ Features - -- **Enrich comprehensive out-of-the-box and updatable data** - - china market data: stock meta, financial statements, major shareholder behavior, executive transactions, dividend financing details, stock market capital flow, margin financing, dragon and tiger charts, etc. - - Market pe, pb, capital flow, margin financing, foreign investment trends, etc. - - crypto currency data -- Standardization of data, multi-data source (provider) cross-validation, completion -- **Simple and extensible data framework** -- **Uniform and simple API, support sql query, support pandas** -- Scalable factor, abstracting a unified calculation for single- and multi-targets operations -- Provides a unified way of visualizing the factor -- **Concise and unified visual analysis method** -- Support multi-targets, multi-factor, multi-level backtesting -- Real-time visualization of factors, trading signals and strategies -- Support a variety of real trading (implementation) - -## 🔰 install - -### quick start -#### 1.clone - -``` -git clone https://github.com/zvtvz/zvt.git -``` - -setup virtual env(python>=3.6),install requirements -``` -pip3 install -r requirements.txt -``` - -#### 2.import the project(pycharm is good for it,vscode is ok too) - -unzip data sample for the tests -``` -python3 init_data_sample.py - -pytest tests -``` - -#### 3.download the data and run -change DATA_PATH(default datasample is just for testing) -``` -DATA_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'data')) -``` - -eastmoney: https://pan.baidu.com/s/1CMAlCRYwlhGVxS6drYUEgA 提取码: q2qn -netease: https://pan.baidu.com/s/1kMhEVO0kH_Pn6wXKyqvJEA 提取码: ijxg -sina: https://pan.baidu.com/s/1eusW65sdK_WE4icnt8JS1g 提取码: uux3 -joinquant: 
https://pan.baidu.com/s/1ijrgjUd1WkRMONrwRQU-4w 提取码: dipd - -unzip them to DATA_PATH - -run the main entry app -``` -python3 index.py -``` - -for updating the data,please refer to [recorders](./zvt/recorders) - -## detailed documentation - -[http://zvt.foolcage.com](http://zvt.foolcage.com) -[https://zvtvz.github.io/zvt](https://zvtvz.github.io/zvt) -> docs is waiting for your contribution,especially english docs - -## 💌 buy me a cup of coffee - -If you think the project is helpful to you, you can buy me a cup of coffee -Alipay      -Wechat - - -## 💡 contribution - -Looking forward to more developers participating in the development of zvt, I will promise Reivew PR as soon as possible and respond promptly. But submit PR please make sure - -1. Pass all unit tests, if it is new, please add unit test to it -2. Compliance with development specifications -3. Update the corresponding document if needed - -Developers are also welcome to provide more examples for zvt to complement the documentation, located at [zvt/docs] (https://github.com/zvtvz/zvt/docs) - - -## Contact information -QQ group:300911873 -check http://www.imqq.com/html/FAQ_en/html/Discussions_3.html - - -wechat Public number (some tutorials would be here): -Wechat - -## Thanks -

jetbrains

diff --git a/README.md b/README.md index 93bdf1cc..f965d98e 100644 --- a/README.md +++ b/README.md @@ -2,365 +2,535 @@ [![image](https://img.shields.io/pypi/v/zvt.svg)](https://pypi.org/project/zvt/) [![image](https://img.shields.io/pypi/l/zvt.svg)](https://pypi.org/project/zvt/) [![image](https://img.shields.io/pypi/pyversions/zvt.svg)](https://pypi.org/project/zvt/) -[![Build Status](https://api.travis-ci.org/zvtvz/zvt.svg?branch=master)](https://travis-ci.org/zvtvz/zvt) +[![build](https://github.com/zvtvz/zvt/actions/workflows/build.yaml/badge.svg)](https://github.com/zvtvz/zvt/actions/workflows/build.yaml) +[![package](https://github.com/zvtvz/zvt/actions/workflows/package.yaml/badge.svg)](https://github.com/zvtvz/zvt/actions/workflows/package.yaml) +[![Documentation Status](https://readthedocs.org/projects/zvt/badge/?version=latest)](https://zvt.readthedocs.io/en/latest/?badge=latest) [![codecov.io](https://codecov.io/github/zvtvz/zvt/coverage.svg?branch=master)](https://codecov.io/github/zvtvz/zvt) [![Downloads](https://pepy.tech/badge/zvt/month)](https://pepy.tech/project/zvt) -**Read this in other languages: [English](README-en.md).** +**The origin of ZVT** -ZVT是对[fooltrader](https://github.com/foolcage/fooltrader)重新思考后编写的量化项目,其包含可扩展的交易标的,数据recorder,api,因子计算,选股,回测,交易,以及统一的可视化,定位为**中低频** **多级别** **多因子** **多标的** 全市场分析和交易框架。 +[The Three Major Principles of Stock Trading](https://mp.weixin.qq.com/s/FoFR63wFSQIE_AyFubkZ6Q) -相比其他的量化系统,其不依赖任何中间件,**非常轻,可测试,可推断,可扩展**。 +**Declaration** -编写该系统的初心: -* 构造统一可扩展的数据schema -* 能够容易地把各provider的数据适配到系统 -* 相同的算法,只写一次,可以应用到任何市场 -* 适用于低耗能的人脑+个人电脑 +This project does not currently guarantee any backward compatibility, so please upgrade with caution. +As the author's thoughts evolve, some things that were once considered important may become less so, and thus may not be maintained. +Whether the addition of some new elements will be useful to you needs to be assessed by yourself. 
-## 详细文档 -[https://zvtvz.github.io/zvt](https://zvtvz.github.io/zvt) +**Read this in other languages: [中文](README-cn.md).** ->详细文档有部分已落后代码不少,其实认真看完README并结合代码理解下面的几句话,基本上不需要什么文档了 +**Read the docs:[https://zvt.readthedocs.io/en/latest/](https://zvt.readthedocs.io/en/latest/)** -* ### entity抽象了 *交易标的* 不变的东西 -* ### 数据就是entity和entity发生的event,数据即api,数据即策略 -* ### 数据是可插入的,发信号到哪是可插入的 +### Install +``` +python3 -m pip install -U zvt +``` -### 架构图: -

+### Main ui -### 扩展应用例子 +#### Dash & Plotly UI -[数字货币插件](https://github.com/zvtvz/zvt-ccxt) +> It's good for backtest and research, but it is not applicable for real-time market data and user interaction. -[定时选股推送](https://github.com/zvtvz/zvt/issues/48) +After the installation is complete, enter zvt on the command line +```shell +zvt +``` +open [http://127.0.0.1:8050/](http://127.0.0.1:8050/) -> 目前主干代码提供的标的类型为A股,其他标的可以通过plugin的方式来实现 +> The example shown here relies on data, factor, trader, please read [docs](https://zvt.readthedocs.io/en/latest/) -### 策略例子 +

+

-[日线策略](https://github.com/zvtvz/zvt/blob/master/examples/trader/macd_day_trader.py.py) +> The core concept of the system is visual, and the name of the interface corresponds to it one-to-one, so it is also uniform and extensible. -[多级别策略](https://github.com/zvtvz/zvt/blob/master/examples/trader/macd_week_and_day_trader.py) +> You can write and run the strategy in your favorite ide, and then view its related targets, factor, signal and performance on the UI. -[分段持续策略](https://github.com/zvtvz/zvt/blob/master/examples/trader/keep_run_trader.py.py) +#### Rest api and standalone UI +> It is more flexible and more scalable, more suitable for handling real-time market data and user interaction. +> Combined with the dynamic tag system provided by ZVT, it offers a trading approach that combines AI with human intervention. -> 可通过自定义策略中的回调函数来控制策略运行的逻辑 +- Init tag system -### 运行界面 +run following scripts: -这里是[入口脚本](https://github.com/zvtvz/zvt/blob/master/zvt/main.py),可直接源码运行;或者pip安装后直接在命令行下输入zvt,然后打开[http://127.0.0.1:8050/](http://127.0.0.1:8050/)即可。 +https://github.com/zvtvz/zvt/blob/master/src/zvt/tasks/init_tag_system.py +https://github.com/zvtvz/zvt/blob/master/src/zvt/tasks/stock_pool_runner.py +https://github.com/zvtvz/zvt/blob/master/src/zvt/tasks/qmt_data_runner.py +https://github.com/zvtvz/zvt/blob/master/src/zvt/tasks/qmt_tick_runner.py -> 这里展示的例子依赖后面的下载历史数据,数据更新请参考后面文档 +- Install uvicorn +```shell +pip install uvicorn +``` +- Run zvt server -

-

+After the installation is complete, enter zvt_server on the command line +```shell +zvt_server +``` +Or run it from source code: +https://github.com/zvtvz/zvt/blob/master/src/zvt/zvt_server.py -> 系统的核心概念是可视化的,界面的名称与其一一对应,因此也是统一可扩展的。 +- Check the api docs -> 你可以在你喜欢的ide里编写和运行策略,然后运行界面查看其相关的标的,因子,信号和净值展示。 +open [http://127.0.0.1:8090/docs](http://127.0.0.1:8090/docs) -### 交易接口 +- Deploy the front end service -> zvt旨在帮你更好的理解市场,理清交易思路,验证想法,实盘交易接口可以通过插件的方式来连接交易信号,并不是zvt核心的东西。 +Front end source code: https://github.com/zvtvz/zvt_ui -## 1. 🔖5分钟用起来 +Change the env file: +https://github.com/zvtvz/zvt_ui/blob/main/.env ->一个系统,如果5分钟用不起来,那肯定是设计软件的人本身就没想清楚,并且其压根就没打算自己用。 +Set {your server IP} to zvt_server IP + +```text +NEXT_PUBLIC_SERVER = {your server IP} +``` -### 1.1 安装 +Then refer to the frontend's README to start the frontend service. -要求python版本>=3.6(建议新建一个干净的virtual env环境) +open [http://127.0.0.1:3000/trade](http://127.0.0.1:3000/trade) + +

+ +### Behold, the power of zvt: +``` +>>> from zvt.domain import Stock, Stock1dHfqKdata +>>> from zvt.ml import MaStockMLMachine +>>> Stock.record_data(provider="em") +>>> entity_ids = ["stock_sz_000001", "stock_sz_000338", "stock_sh_601318"] +>>> Stock1dHfqKdata.record_data(provider="em", entity_ids=entity_ids, sleeping_time=1) +>>> machine = MaStockMLMachine(entity_ids=["stock_sz_000001"], data_provider="em") +>>> machine.train() +>>> machine.predict() +>>> machine.draw_result(entity_id="stock_sz_000001") ``` -pip install zvt -i http://pypi.douban.com/simple --trusted-host pypi.douban.com +

-pip show zvt +> The few lines of code above has done: data capture, persistence, incremental update, machine learning, prediction, and display results. +> Once you are familiar with the core concepts of the system, you can apply it to any target in the market. + +### Data + +#### China stock ``` +>>> from zvt.domain import * +>>> Stock.record_data(provider="em") +>>> df = Stock.query_data(provider="em", index='code') +>>> print(df) + + id entity_id timestamp entity_type exchange code name list_date end_date +code +000001 stock_sz_000001 stock_sz_000001 1991-04-03 stock sz 000001 平安银行 1991-04-03 None +000002 stock_sz_000002 stock_sz_000002 1991-01-29 stock sz 000002 万 科A 1991-01-29 None +000004 stock_sz_000004 stock_sz_000004 1990-12-01 stock sz 000004 国华网安 1990-12-01 None +000005 stock_sz_000005 stock_sz_000005 1990-12-10 stock sz 000005 世纪星源 1990-12-10 None +000006 stock_sz_000006 stock_sz_000006 1992-04-27 stock sz 000006 深振业A 1992-04-27 None +... ... ... ... ... ... ... ... ... ... +605507 stock_sh_605507 stock_sh_605507 2021-08-02 stock sh 605507 国邦医药 2021-08-02 None +605577 stock_sh_605577 stock_sh_605577 2021-08-24 stock sh 605577 龙版传媒 2021-08-24 None +605580 stock_sh_605580 stock_sh_605580 2021-08-19 stock sh 605580 恒盛能源 2021-08-19 None +605588 stock_sh_605588 stock_sh_605588 2021-08-12 stock sh 605588 冠石科技 2021-08-12 None +605589 stock_sh_605589 stock_sh_605589 2021-08-10 stock sh 605589 圣泉集团 2021-08-10 None -如果不是最新版本 +[4136 rows x 9 columns] ``` -pip install --upgrade zvt -i http://pypi.douban.com/simple --trusted-host pypi.douban.com + +#### USA stock ``` +>>> Stockus.record_data() +>>> df = Stockus.query_data(index='code') +>>> print(df) -> 请根据需要决定是否使用豆瓣镜像源 + id entity_id timestamp entity_type exchange code name list_date end_date +code +A stockus_nyse_A stockus_nyse_A NaT stockus nyse A 安捷伦 None None +AA stockus_nyse_AA stockus_nyse_AA NaT stockus nyse AA 美国铝业 None None +AAC stockus_nyse_AAC stockus_nyse_AAC NaT stockus nyse AAC Ares Acquisition Corp-A 
None None +AACG stockus_nasdaq_AACG stockus_nasdaq_AACG NaT stockus nasdaq AACG ATA Creativity Global ADR None None +AACG stockus_nyse_AACG stockus_nyse_AACG NaT stockus nyse AACG ATA Creativity Global ADR None None +... ... ... ... ... ... ... ... ... ... +ZWRK stockus_nasdaq_ZWRK stockus_nasdaq_ZWRK NaT stockus nasdaq ZWRK Z-Work Acquisition Corp-A None None +ZY stockus_nasdaq_ZY stockus_nasdaq_ZY NaT stockus nasdaq ZY Zymergen Inc None None +ZYME stockus_nyse_ZYME stockus_nyse_ZYME NaT stockus nyse ZYME Zymeworks Inc None None +ZYNE stockus_nasdaq_ZYNE stockus_nasdaq_ZYNE NaT stockus nasdaq ZYNE Zynerba Pharmaceuticals Inc None None +ZYXI stockus_nasdaq_ZYXI stockus_nasdaq_ZYXI NaT stockus nasdaq ZYXI Zynex Inc None None +[5826 rows x 9 columns] -### 1.2 进入ipython,体验一把 +>>> Stockus.query_data(code='AAPL') + id entity_id timestamp entity_type exchange code name list_date end_date +0 stockus_nasdaq_AAPL stockus_nasdaq_AAPL None stockus nasdaq AAPL 苹果 None None +``` -#### k线数据 +#### Hong Kong stock ``` -In [1]: import os +>>> Stockhk.record_data() +>>> df = Stockhk.query_data(index='code') +>>> print(df) -#这一句会进入测试环境,使用自带的测试数据 -In [2]: os.environ["TESTING_ZVT"] = "1" + id entity_id timestamp entity_type exchange code name list_date end_date +code +00001 stockhk_hk_00001 stockhk_hk_00001 NaT stockhk hk 00001 长和 None None +00002 stockhk_hk_00002 stockhk_hk_00002 NaT stockhk hk 00002 中电控股 None None +00003 stockhk_hk_00003 stockhk_hk_00003 NaT stockhk hk 00003 香港中华煤气 None None +00004 stockhk_hk_00004 stockhk_hk_00004 NaT stockhk hk 00004 九龙仓集团 None None +00005 stockhk_hk_00005 stockhk_hk_00005 NaT stockhk hk 00005 汇丰控股 None None +... ... ... ... ... ... ... ... ... ... 
+09996 stockhk_hk_09996 stockhk_hk_09996 NaT stockhk hk 09996 沛嘉医疗-B None None +09997 stockhk_hk_09997 stockhk_hk_09997 NaT stockhk hk 09997 康基医疗 None None +09998 stockhk_hk_09998 stockhk_hk_09998 NaT stockhk hk 09998 光荣控股 None None +09999 stockhk_hk_09999 stockhk_hk_09999 NaT stockhk hk 09999 网易-S None None +80737 stockhk_hk_80737 stockhk_hk_80737 NaT stockhk hk 80737 湾区发展-R None None -In [3]: from zvt.api import * +[2597 rows x 9 columns] -In [4]: df = get_kdata(entity_id='stock_sz_000338',provider='joinquant') +>>> df[df.code=='00700'] -In [5]: df.tail() -Out[5]: - id entity_id timestamp provider code name level open close high low volume turnover change_pct turnover_rate -timestamp -2019-10-29 stock_sz_000338_2019-10-29 stock_sz_000338 2019-10-29 joinquant 000338 潍柴动力 1d 12.00 11.78 12.02 11.76 28533132.0 3.381845e+08 None None -2019-10-30 stock_sz_000338_2019-10-30 stock_sz_000338 2019-10-30 joinquant 000338 潍柴动力 1d 11.74 12.05 12.08 11.61 42652561.0 5.066013e+08 None None -2019-10-31 stock_sz_000338_2019-10-31 stock_sz_000338 2019-10-31 joinquant 000338 潍柴动力 1d 12.05 11.56 12.08 11.50 77329380.0 9.010439e+08 None None -2019-11-01 stock_sz_000338_2019-11-01 stock_sz_000338 2019-11-01 joinquant 000338 潍柴动力 1d 11.55 12.69 12.70 11.52 160732771.0 1.974125e+09 None None -2019-11-04 stock_sz_000338_2019-11-04 stock_sz_000338 2019-11-04 joinquant 000338 潍柴动力 1d 12.77 13.00 13.11 12.77 126673139.0 1.643788e+09 None None -``` + id entity_id timestamp entity_type exchange code name list_date end_date +2112 stockhk_hk_00700 stockhk_hk_00700 None stockhk hk 00700 腾讯控股 None None -#### 财务数据 ``` -In [12]: from zvt.domain import * -In [13]: df = FinanceFactor.query_data(entity_id='stock_sz_000338',columns=FinanceFactor.important_cols()) -In [14]: df.tail() -Out[14]: - basic_eps total_op_income net_profit op_income_growth_yoy net_profit_growth_yoy roe rota gross_profit_margin net_margin timestamp -timestamp -2018-10-31 0.73 1.182000e+11 6.001000e+09 0.0595 0.3037 0.1647 
0.0414 0.2164 0.0681 2018-10-31 -2019-03-26 1.08 1.593000e+11 8.658000e+09 0.0507 0.2716 0.2273 0.0589 0.2233 0.0730 2019-03-26 -2019-04-29 0.33 4.521000e+10 2.591000e+09 0.1530 0.3499 0.0637 0.0160 0.2166 0.0746 2019-04-29 -2019-08-30 0.67 9.086000e+10 5.287000e+09 0.1045 0.2037 0.1249 0.0315 0.2175 0.0759 2019-08-30 -2019-10-31 0.89 1.267000e+11 7.058000e+09 0.0721 0.1761 0.1720 0.0435 0.2206 0.0736 2019-10-31 +#### And more +``` +>>> from zvt.contract import * +>>> zvt_context.tradable_schema_map +{'stockus': zvt.domain.meta.stockus_meta.Stockus, + 'stockhk': zvt.domain.meta.stockhk_meta.Stockhk, + 'index': zvt.domain.meta.index_meta.Index, + 'etf': zvt.domain.meta.etf_meta.Etf, + 'stock': zvt.domain.meta.stock_meta.Stock, + 'block': zvt.domain.meta.block_meta.Block, + 'fund': zvt.domain.meta.fund_meta.Fund} ``` -#### 跑个策略 +The key is tradable entity type, and the value is the schema. The system provides unified **record (record_data)** and **query (query_data)** methods for the schema. + ``` -In [15]: from zvt.samples import * -In [16]: t = MyMaTrader(codes=['000338'], level=IntervalLevel.LEVEL_1DAY, start_timestamp='2018-01-01', - ...: end_timestamp='2019-06-30', trader_name='000338_ma_trader') -In [17]: t.run() +>>> Index.record_data() +>>> df=Index.query_data(filters=[Index.category=='scope',Index.exchange='sh']) +>>> print(df) + id entity_id timestamp entity_type exchange code name list_date end_date publisher category base_point +0 index_sh_000001 index_sh_000001 1990-12-19 index sh 000001 上证指数 1991-07-15 None csindex scope 100.00 +1 index_sh_000002 index_sh_000002 1990-12-19 index sh 000002 A股指数 1992-02-21 None csindex scope 100.00 +2 index_sh_000003 index_sh_000003 1992-02-21 index sh 000003 B股指数 1992-08-17 None csindex scope 100.00 +3 index_sh_000010 index_sh_000010 2002-06-28 index sh 000010 上证180 2002-07-01 None csindex scope 3299.06 +4 index_sh_000016 index_sh_000016 2003-12-31 index sh 000016 上证50 2004-01-02 None csindex scope 1000.00 +.. ... ... 
... ... ... ... ... ... ... ... ... ... +25 index_sh_000020 index_sh_000020 2007-12-28 index sh 000020 中型综指 2008-05-12 None csindex scope 1000.00 +26 index_sh_000090 index_sh_000090 2009-12-31 index sh 000090 上证流通 2010-12-02 None csindex scope 1000.00 +27 index_sh_930903 index_sh_930903 2012-12-31 index sh 930903 中证A股 2016-10-18 None csindex scope 1000.00 +28 index_sh_000688 index_sh_000688 2019-12-31 index sh 000688 科创50 2020-07-23 None csindex scope 1000.00 +29 index_sh_931643 index_sh_931643 2019-12-31 index sh 931643 科创创业50 2021-06-01 None csindex scope 1000.00 + +[30 rows x 12 columns] ``` -测试数据里面包含的SAMPLE_STOCK_CODES = ['000001', '000783', '000778', '603220', '601318', '000338', '002572', '300027'],试一下传入其任意组合,即可看多标的的效果。 -## 2. 📝正式环境 -项目支持多环境切换,默认情况下,不设置环境变量TESTING_ZVT即为正式环境 - ``` -In [1]: from zvt import * -{'data_path': '/Users/xuanqi/zvt-home/data', - 'domain_module': 'zvt.domain', - 'email_password': '', - 'email_username': '', - 'http_proxy': '127.0.0.1:1087', - 'https_proxy': '127.0.0.1:1087', - 'jq_password': '', - 'jq_username': '', - 'log_path': '/Users/xuanqi/zvt-home/logs', - 'smtp_host': 'smtpdm.aliyun.com', - 'smtp_port': '80', - 'ui_path': '/Users/xuanqi/zvt-home/ui', - 'wechat_app_id': '', - 'wechat_app_secrect': '', - 'zvt_home': '/Users/xuanqi/zvt-home'} - ``` +### EntityEvent +We have tradable entity and then events about them. ->如果你不想使用使用默认的zvt_home目录,请设置环境变量ZVT_HOME再运行。 +#### Market quotes +the TradableEntity quote schema follows the following rules: +``` +{entity_shema}{level}{adjust_type}Kdata +``` +* entity_schema -所有操作跟测试环境是一致的,只是操作的目录不同。 +TradableEntity class,e.g., Stock,Stockus. 
-### 2.1 配置(可选) -在zvt_home目录中找到config.json进行配置: +* level +``` +>>> for level in IntervalLevel: + print(level.value) +``` -#### 使用聚宽数据需要设置: -* jq_username 聚宽数据用户名 -* jq_password 聚宽数据密码 +* adjust type +``` +>>> for adjust_type in AdjustType: + print(adjust_type.value) +``` -#### 使用邮箱推送需要设置: -* smtp_host 邮件服务器host -* smtp_port 邮件服务器端口 -* email_username smtp邮箱账户 -* email_password smtp邮箱密码 +> Note: In order to be compatible with historical data, the pre-reset is an exception, {adjust_type} is left empty -#### 使用微信公众号推送需要设置: -* wechat_app_id -* wechat_app_secrect +qfq +``` +>>> Stock1dKdata.record_data(code='000338', provider='em') +>>> df = Stock1dKdata.query_data(code='000338', provider='em') +>>> print(df) -### 2.2 下载历史数据(可选) -百度网盘: https://pan.baidu.com/s/1kHAxGSxx8r5IBHe5I7MAmQ 提取码: yb6c + id entity_id timestamp provider code name level open close high low volume turnover change_pct turnover_rate +0 stock_sz_000338_2007-04-30 stock_sz_000338 2007-04-30 None 000338 潍柴动力 1d 2.33 2.00 2.40 1.87 207375.0 1.365189e+09 3.2472 0.1182 +1 stock_sz_000338_2007-05-08 stock_sz_000338 2007-05-08 None 000338 潍柴动力 1d 2.11 1.94 2.20 1.87 86299.0 5.563198e+08 -0.0300 0.0492 +2 stock_sz_000338_2007-05-09 stock_sz_000338 2007-05-09 None 000338 潍柴动力 1d 1.90 1.81 1.94 1.66 93823.0 5.782065e+08 -0.0670 0.0535 +3 stock_sz_000338_2007-05-10 stock_sz_000338 2007-05-10 None 000338 潍柴动力 1d 1.78 1.85 1.98 1.75 47720.0 2.999226e+08 0.0221 0.0272 +4 stock_sz_000338_2007-05-11 stock_sz_000338 2007-05-11 None 000338 潍柴动力 1d 1.81 1.73 1.81 1.66 39273.0 2.373126e+08 -0.0649 0.0224 +... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... 
+3426 stock_sz_000338_2021-08-27 stock_sz_000338 2021-08-27 None 000338 潍柴动力 1d 19.39 20.30 20.30 19.25 1688497.0 3.370241e+09 0.0601 0.0398 +3427 stock_sz_000338_2021-08-30 stock_sz_000338 2021-08-30 None 000338 潍柴动力 1d 20.30 20.09 20.31 19.78 1187601.0 2.377957e+09 -0.0103 0.0280 +3428 stock_sz_000338_2021-08-31 stock_sz_000338 2021-08-31 None 000338 潍柴动力 1d 20.20 20.07 20.63 19.70 1143985.0 2.295195e+09 -0.0010 0.0270 +3429 stock_sz_000338_2021-09-01 stock_sz_000338 2021-09-01 None 000338 潍柴动力 1d 19.98 19.68 19.98 19.15 1218697.0 2.383841e+09 -0.0194 0.0287 +3430 stock_sz_000338_2021-09-02 stock_sz_000338 2021-09-02 None 000338 潍柴动力 1d 19.71 19.85 19.97 19.24 1023545.0 2.012006e+09 0.0086 0.0241 -google drive: https://drive.google.com/drive/folders/17Bxijq-PHJYrLDpyvFAm5P6QyhKL-ahn?usp=sharing +[3431 rows x 15 columns] -里面包含joinquant的日/周线后复权数据,个股估值,基金及其持仓数据,eastmoney的财务等数据。 +>>> Stockus1dKdata.record_data(code='AAPL', provider='em') +>>> df = Stockus1dKdata.query_data(code='AAPL', provider='em') +>>> print(df) -把下载的数据解压到正式环境的data_path(所有db文件放到该目录下,没有层级结构) + id entity_id timestamp provider code name level open close high low volume turnover change_pct turnover_rate +0 stockus_nasdaq_AAPL_1984-09-07 stockus_nasdaq_AAPL 1984-09-07 None AAPL 苹果 1d -5.59 -5.59 -5.58 -5.59 2981600.0 0.000000e+00 0.0000 0.0002 +1 stockus_nasdaq_AAPL_1984-09-10 stockus_nasdaq_AAPL 1984-09-10 None AAPL 苹果 1d -5.59 -5.59 -5.58 -5.59 2346400.0 0.000000e+00 0.0000 0.0001 +2 stockus_nasdaq_AAPL_1984-09-11 stockus_nasdaq_AAPL 1984-09-11 None AAPL 苹果 1d -5.58 -5.58 -5.58 -5.58 5444000.0 0.000000e+00 0.0018 0.0003 +3 stockus_nasdaq_AAPL_1984-09-12 stockus_nasdaq_AAPL 1984-09-12 None AAPL 苹果 1d -5.58 -5.59 -5.58 -5.59 4773600.0 0.000000e+00 -0.0018 0.0003 +4 stockus_nasdaq_AAPL_1984-09-13 stockus_nasdaq_AAPL 1984-09-13 None AAPL 苹果 1d -5.58 -5.58 -5.58 -5.58 7429600.0 0.000000e+00 0.0018 0.0004 +... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... 
+8765 stockus_nasdaq_AAPL_2021-08-27 stockus_nasdaq_AAPL 2021-08-27 None AAPL 苹果 1d 147.48 148.60 148.75 146.83 55802388.0 8.265452e+09 0.0072 0.0034 +8766 stockus_nasdaq_AAPL_2021-08-30 stockus_nasdaq_AAPL 2021-08-30 None AAPL 苹果 1d 149.00 153.12 153.49 148.61 90956723.0 1.383762e+10 0.0304 0.0055 +8767 stockus_nasdaq_AAPL_2021-08-31 stockus_nasdaq_AAPL 2021-08-31 None AAPL 苹果 1d 152.66 151.83 152.80 151.29 86453117.0 1.314255e+10 -0.0084 0.0052 +8768 stockus_nasdaq_AAPL_2021-09-01 stockus_nasdaq_AAPL 2021-09-01 None AAPL 苹果 1d 152.83 152.51 154.98 152.34 80313711.0 1.235321e+10 0.0045 0.0049 +8769 stockus_nasdaq_AAPL_2021-09-02 stockus_nasdaq_AAPL 2021-09-02 None AAPL 苹果 1d 153.87 153.65 154.72 152.40 71171317.0 1.093251e+10 0.0075 0.0043 -数据的更新是增量的,下载历史数据只是为了节省时间,全部自己更新也是可以的。 +[8770 rows x 15 columns] +``` -### 2.3 注册聚宽(可选) -项目数据支持多provider,在数据schema一致性的基础上,可根据需要进行选择和扩展,目前支持新浪,东财,交易所等免费数据。 +hfq +``` +>>> Stock1dHfqKdata.record_data(code='000338', provider='em') +>>> df = Stock1dHfqKdata.query_data(code='000338', provider='em') +>>> print(df) -#### 数据的设计上是让provider来适配schema,而不是反过来,这样即使某provider不可用了,换一个即可,不会影响整个系统的使用。 + id entity_id timestamp provider code name level open close high low volume turnover change_pct turnover_rate +0 stock_sz_000338_2007-04-30 stock_sz_000338 2007-04-30 None 000338 潍柴动力 1d 70.00 64.93 71.00 62.88 207375.0 1.365189e+09 2.1720 0.1182 +1 stock_sz_000338_2007-05-08 stock_sz_000338 2007-05-08 None 000338 潍柴动力 1d 66.60 64.00 68.00 62.88 86299.0 5.563198e+08 -0.0143 0.0492 +2 stock_sz_000338_2007-05-09 stock_sz_000338 2007-05-09 None 000338 潍柴动力 1d 63.32 62.00 63.88 59.60 93823.0 5.782065e+08 -0.0313 0.0535 +3 stock_sz_000338_2007-05-10 stock_sz_000338 2007-05-10 None 000338 潍柴动力 1d 61.50 62.49 64.48 61.01 47720.0 2.999226e+08 0.0079 0.0272 +4 stock_sz_000338_2007-05-11 stock_sz_000338 2007-05-11 None 000338 潍柴动力 1d 61.90 60.65 61.90 59.70 39273.0 2.373126e+08 -0.0294 0.0224 +... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... 
+3426 stock_sz_000338_2021-08-27 stock_sz_000338 2021-08-27 None 000338 潍柴动力 1d 331.97 345.95 345.95 329.82 1688497.0 3.370241e+09 0.0540 0.0398 +3427 stock_sz_000338_2021-08-30 stock_sz_000338 2021-08-30 None 000338 潍柴动力 1d 345.95 342.72 346.10 337.96 1187601.0 2.377957e+09 -0.0093 0.0280 +3428 stock_sz_000338_2021-08-31 stock_sz_000338 2021-08-31 None 000338 潍柴动力 1d 344.41 342.41 351.02 336.73 1143985.0 2.295195e+09 -0.0009 0.0270 +3429 stock_sz_000338_2021-09-01 stock_sz_000338 2021-09-01 None 000338 潍柴动力 1d 341.03 336.42 341.03 328.28 1218697.0 2.383841e+09 -0.0175 0.0287 +3430 stock_sz_000338_2021-09-02 stock_sz_000338 2021-09-02 None 000338 潍柴动力 1d 336.88 339.03 340.88 329.67 1023545.0 2.012006e+09 0.0078 0.0241 -但免费数据的缺点是显而易见的:不稳定,爬取清洗数据耗时耗力,维护代价巨大,且随时可能不可用。 -个人建议:如果只是学习研究,可以使用免费数据;如果是真正有意投身量化,还是选一家可靠的数据提供商。 +[3431 rows x 15 columns] +``` -项目支持聚宽的数据,可戳以下链接申请使用(目前可免费使用一年) -https://www.joinquant.com/default/index/sdk?channelId=953cbf5d1b8683f81f0c40c9d4265c0d +#### Finance factor +``` +>>> FinanceFactor.record_data(code='000338') +>>> FinanceFactor.query_data(code='000338',columns=FinanceFactor.important_cols(),index='timestamp') -> 项目中大部分的免费数据目前都是比较稳定的,且做过严格测试,特别是东财的数据,可放心使用 + basic_eps total_op_income net_profit op_income_growth_yoy net_profit_growth_yoy roe rota gross_profit_margin net_margin timestamp +timestamp +2002-12-31 NaN 1.962000e+07 2.471000e+06 NaN NaN NaN NaN 0.2068 0.1259 2002-12-31 +2003-12-31 1.27 3.574000e+09 2.739000e+08 181.2022 109.8778 0.7729 0.1783 0.2551 0.0766 2003-12-31 +2004-12-31 1.75 6.188000e+09 5.369000e+08 0.7313 0.9598 0.3245 0.1474 0.2489 0.0868 2004-12-31 +2005-12-31 0.93 5.283000e+09 3.065000e+08 -0.1463 -0.4291 0.1327 0.0603 0.2252 0.0583 2005-12-31 +2006-03-31 0.33 1.859000e+09 1.079000e+08 NaN NaN NaN NaN NaN 0.0598 2006-03-31 +... ... ... ... ... ... ... ... ... ... ... 
+2020-08-28 0.59 9.449000e+10 4.680000e+09 0.0400 -0.1148 0.0983 0.0229 0.1958 0.0603 2020-08-28 +2020-10-31 0.90 1.474000e+11 7.106000e+09 0.1632 0.0067 0.1502 0.0347 0.1949 0.0590 2020-10-31 +2021-03-31 1.16 1.975000e+11 9.207000e+09 0.1327 0.0112 0.1919 0.0444 0.1931 0.0571 2021-03-31 +2021-04-30 0.42 6.547000e+10 3.344000e+09 0.6788 0.6197 0.0622 0.0158 0.1916 0.0667 2021-04-30 +2021-08-31 0.80 1.264000e+11 6.432000e+09 0.3375 0.3742 0.1125 0.0287 0.1884 0.0653 2021-08-31 -> 添加其他数据提供商, 请参考[数据扩展教程](https://zvtvz.github.io/zvt/#/data_extending) +[66 rows x 10 columns] +``` -## 3. 数据 -下面介绍如何用一种**统一**的方式来回答三个问题:**有什么数据?如何更新数据?如何查询数据?** +#### Three financial tables +``` +>>> BalanceSheet.record_data(code='000338') +>>> IncomeStatement.record_data(code='000338') +>>> CashFlowStatement.record_data(code='000338') +``` -### 3.1 有什么数据? +#### And more ``` -In [1]: from zvt.contract import zvt_context -In [2]: from zvt.domain import * -In [3]: zvt_context.schemas +>>> zvt_context.schemas [zvt.domain.dividend_financing.DividendFinancing, zvt.domain.dividend_financing.DividendDetail, zvt.domain.dividend_financing.SpoDetail...] ``` -zvt_context.schemas为系统支持的schema,schema即表结构,即数据,其字段含义的查看方式如下: -* 源码 - -[domain](https://github.com/zvtvz/zvt/tree/master/zvt/domain)里的文件为schema的定义,查看相应字段的注释即可。 +All schemas is registered in zvt_context.schemas, **schema** is table, data structure. +The fields and meaning could be checked in following ways: * help -输入schema.按tab提示其包含的字段,或者.help() +type the schema. and press tab to show its fields or .help() ``` -In [4]: FinanceFactor.help() +>>> FinanceFactor.help() ``` -### 3.2 如何更新数据? 
-#### 只需要一个方法:record_data() +* source code -``` -#股票列表 -In [2]: Stock.record_data(provider='eastmoney') -#财务指标 -In [3]: FinanceFactor.record_data(codes=['000338']) -#资产负债表 -In [4]: BalanceSheet.record_data(codes=['000338']) -#利润表 -In [5]: IncomeStatement.record_data(codes=['000338']) -#现金流量表 -In [5]: CashFlowStatement.record_data(codes=['000338']) -``` +Schemas defined in [domain](https://github.com/zvtvz/zvt/tree/master/src/zvt/domain) -其他数据依样画葫芦即可。 +From above examples, you should know the unified way of recording data: -> 标准流程就是: Schema.record_data(provoder='your provoder',codes='the codes') +> Schema.record_data(provider='your provider',codes='the codes') -注意可选参数provider,其代表数据提供商,一个schema可以有多个provider,这是系统稳定的基石。 +Note the optional parameter provider, which represents the data provider. +A schema can have multiple providers, which is the cornerstone of system stability. -查看**已实现**的provider +Check the provider has been implemented: ``` -In [12]: Stock.provider_map_recorder -Out[12]: -{'joinquant': zvt.recorders.joinquant.meta.china_stock_meta_recorder.JqChinaStockRecorder, - 'exchange': zvt.recorders.exchange.china_stock_list_spider.ExchangeChinaStockListRecorder, - 'eastmoney': zvt.recorders.eastmoney.meta.china_stock_meta_recorder.EastmoneyChinaStockListRecorder} -``` -你可以使用任意一个provider来获取数据,默认使用第一个。 +>>> Stock.provider_map_recorder +{'joinquant': zvt.recorders.joinquant.meta.jq_stock_meta_recorder.JqChinaStockRecorder, + 'exchange': zvt.recorders.exchange.exchange_stock_meta_recorder.ExchangeStockMetaRecorder, + 'em': zvt.recorders.em.meta.em_stock_meta_recorder.EMStockRecorder, + 'eastmoney': zvt.recorders.eastmoney.meta.eastmoney_stock_meta_recorder.EastmoneyChinaStockListRecorder} +``` +You can use any provider to get the data, the first one is used by default. 
-再举个例子,股票板块数据获取: +One more example, the stock sector data recording: ``` -In [13]: Block.provider_map_recorder -Out[13]: -{'eastmoney': zvt.recorders.eastmoney.meta.china_stock_category_recorder.EastmoneyChinaBlockRecorder, - 'sina': zvt.recorders.sina.meta.sina_china_stock_category_recorder.SinaChinaBlockRecorder} +>>> Block.provider_map_recorder +{'eastmoney': zvt.recorders.eastmoney.meta.eastmoney_block_meta_recorder.EastmoneyChinaBlockRecorder, + 'sina': zvt.recorders.sina.meta.sina_block_recorder.SinaBlockRecorder} -In [14]: Block.record_data(provider='sina') +>>> Block.record_data(provider='sina') Block registered recorders:{'eastmoney': , 'sina': } 2020-03-04 23:56:48,931 INFO MainThread finish record sina blocks:industry 2020-03-04 23:56:49,450 INFO MainThread finish record sina blocks:concept ``` -再多了解一点record_data: -* 参数codes代表需要抓取的股票代码 -* 不传入codes则是全市场抓取 -* 该方法会把数据存储到本地并只做增量更新 +Learn more about record_data + +* The parameter code[single], codes[multiple] represent the stock codes to be recorded +* Recording the whole market if not set code, codes +* This method will store the data locally and only do incremental updates -定时任务的方式更新可参考[东财数据定时更新](https://github.com/zvtvz/zvt/blob/master/examples/recorders/eastmoney_data_runner1.py) +Refer to the scheduling recoding way[data runner](https://github.com/zvtvz/zvt/blob/master/examples/data_runner) -### 3.3 如何查询数据? -#### 只需要一个方法:query_data() +#### Market-wide stock selection -2018年年报 roe>8% 营收增长>8% 的前20个股 +After recording the data of the whole market, you can quickly query the required data locally. 
+ +An example: the top 20 stocks with roe>8% and revenue growth>8% in the 2018 annual report ``` -In [38]: df=FinanceFactor.query_data(filters=[FinanceFactor.roe>0.08,FinanceFactor.report_period=='year',FinanceFactor.op_income_growth_yoy>0.08],start_timestamp='2019-01-01',order=FinanceFactor.roe.desc(),limit=20,columns=[FinanceFactor.code]+FinanceFactor.important_cols(),index='code') +>>> df=FinanceFactor.query_data(filters=[FinanceFactor.roe>0.08,FinanceFactor.report_period=='year',FinanceFactor.op_income_growth_yoy>0.08],start_timestamp='2019-01-01',order=FinanceFactor.roe.desc(),limit=20,columns=["code"]+FinanceFactor.important_cols(),index='code') -In [39]: df -Out[39]: code basic_eps total_op_income net_profit op_income_growth_yoy net_profit_growth_yoy roe rota gross_profit_margin net_margin timestamp code -000048 000048 1.1193 3.437000e+09 4.374000e+08 1.2179 3.8122 0.5495 0.0989 0.4286 0.1308 2019-04-15 -000629 000629 0.3598 1.516000e+10 3.090000e+09 0.6068 2.5796 0.5281 0.2832 0.2752 0.2086 2019-03-26 -000672 000672 1.8100 5.305000e+09 1.472000e+09 0.1563 0.8596 0.5047 0.2289 0.4670 0.2803 2019-04-11 +000048 000048 2.7350 4.919000e+09 1.101000e+09 0.4311 1.5168 0.7035 0.1988 0.5243 0.2355 2020-04-30 000912 000912 0.3500 4.405000e+09 3.516000e+08 0.1796 1.2363 4.7847 0.0539 0.2175 0.0795 2019-03-20 -000932 000932 2.2483 9.137000e+10 6.780000e+09 0.1911 0.6453 0.4866 0.1137 0.1743 0.0944 2019-03-28 -002607 002607 0.2200 6.237000e+09 1.153000e+09 0.5472 1.1967 0.7189 0.2209 0.5908 0.1848 2019-04-09 -002959 002959 2.0611 2.041000e+09 1.855000e+08 0.2396 0.2657 0.5055 0.2075 0.3251 0.0909 2019-07-15 -300107 300107 1.1996 1.418000e+09 6.560000e+08 1.6467 6.5338 0.5202 0.4661 0.6379 0.4625 2019-03-15 -300618 300618 3.6900 2.782000e+09 7.076000e+08 0.8994 0.5746 0.4965 0.2504 0.4530 0.2531 2019-04-26 -300776 300776 3.3900 3.649000e+08 1.679000e+08 1.2059 1.5013 0.7122 0.2651 0.6207 0.4602 2019-02-18 -300792 300792 2.7100 1.013000e+09 1.626000e+08 0.4378 0.1799 
0.4723 0.3797 0.4259 0.1606 2019-09-16 -600399 600399 2.0100 5.848000e+09 2.607000e+09 0.1732 2.9493 9.6467 0.2979 0.1453 0.4459 2019-03-29 -600408 600408 0.8100 8.816000e+09 8.202000e+08 0.3957 3.9094 0.7501 0.1681 0.1535 0.1020 2019-03-22 -600423 600423 0.9000 2.009000e+09 3.903000e+08 0.0975 5.3411 1.6695 0.1264 0.1404 0.1871 2019-03-19 -600507 600507 2.0800 1.729000e+10 2.927000e+09 0.2396 0.1526 0.5817 0.3216 0.3287 0.1696 2019-02-22 -600678 600678 0.0900 4.240000e+08 3.168000e+07 1.2925 0.0948 0.7213 0.0689 0.2183 0.0742 2019-03-14 +002207 002207 0.2200 3.021000e+08 5.189000e+07 0.1600 1.1526 1.1175 0.1182 0.1565 0.1718 2020-04-27 +002234 002234 5.3300 3.276000e+09 1.610000e+09 0.8023 3.2295 0.8361 0.5469 0.5968 0.4913 2020-04-21 +002458 002458 3.7900 3.584000e+09 2.176000e+09 1.4326 4.9973 0.8318 0.6754 0.6537 0.6080 2020-02-20 +... ... ... ... ... ... ... ... ... ... ... ... +600701 600701 -3.6858 7.830000e+08 -3.814000e+09 1.3579 -0.0325 1.9498 -0.7012 0.4173 -4.9293 2020-04-29 +600747 600747 -1.5600 3.467000e+08 -2.290000e+09 2.1489 -0.4633 3.1922 -1.5886 0.0378 -6.6093 2020-06-30 600793 600793 1.6568 1.293000e+09 1.745000e+08 0.1164 0.8868 0.7490 0.0486 0.1622 0.1350 2019-04-30 600870 600870 0.0087 3.096000e+07 4.554000e+06 0.7773 1.3702 0.7458 0.0724 0.2688 0.1675 2019-03-30 -601003 601003 1.7987 4.735000e+10 4.610000e+09 0.1394 0.7420 0.5264 0.1920 0.1439 0.0974 2019-03-29 -603379 603379 2.9400 4.454000e+09 1.108000e+09 0.1423 0.1609 0.5476 0.3547 0.3959 0.2488 2019-03-13 +688169 688169 15.6600 4.205000e+09 7.829000e+08 0.3781 1.5452 0.7172 0.4832 0.3612 0.1862 2020-04-28 + +[20 rows x 11 columns] ``` -以上,基本上就可以应付大部分日常数据的使用了。 -如果你想扩展数据,可以参考详细文档里的数据部分。 -## 4. 数据即策略 -在介绍系统设计的二维索引多标的计算模型之前,我们先来介绍一种自由(solo)的策略模式。 -所谓策略回测,无非就是,重复以下过程: -### 在某时间点,找到符合条件的标的,对其进行买卖,看其表现。 -因为系统所有的数据都是时间序列数据,有着统一的查询方式,通过query_data可以快速得到符合条件的标的,所以,即使只会query_data,也可以solo一把了。 +So, you should be able to answer the following three questions now: +* What data is there? 
+* How to record data? +* How to query data? + +For more advanced usage and extended data, please refer to the data section in the detailed document. + +### Write strategy +Now we could write strategies based on TradableEntity and EntityEvent. +The so-called strategy backtesting is nothing but repeating the following process: + +#### At a certain time, find the targets which match the conditions, buy and sell them, and see the performance. + +Two modes to write strategies: +* solo (free style) + +At a certain time, calculate conditions according to the events, buy and sell -[例子](https://github.com/zvtvz/zvt/blob/master/examples/trader/solo_traders.py) +* formal + +The calculation model of the two-dimensional index and multi-entity + +#### a too simple,sometimes naive person (solo) +Well, this strategy is really too simple,sometimes naive, as we do most of the time. +> When the report comes out, I look at the report. +> If the institution increases its position by more than 5%, I will buy it, and if the institution reduces its position by more than 50%, I will sell it.
+ +Show you the code: ``` -class MySoloTrader(StockTrader): - def on_time(self, timestamp): - # 增持5000股以上,买买买 - long_df = ManagerTrading.query_data(start_timestamp=timestamp, end_timestamp=timestamp, - filters=[ManagerTrading.volume > 5000], columns=[ManagerTrading.entity_id], - order=ManagerTrading.volume.desc(), limit=10) - # 减持5000股以上,闪闪闪 - short_df = ManagerTrading.query_data(start_timestamp=timestamp, end_timestamp=timestamp, - filters=[ManagerTrading.volume < -5000], - columns=[ManagerTrading.entity_id], - order=ManagerTrading.volume.asc(), limit=10) - if pd_is_not_null(long_df) or pd_is_not_null(short_df): - try: - self.trade_the_targets(due_timestamp=timestamp, happen_timestamp=timestamp, - long_selected=set(long_df['entity_id'].to_list()), - short_selected=set(short_df['entity_id'].to_list())) - except Exception as e: - self.logger.error(e) +# -*- coding: utf-8 -*- +import pandas as pd + +from zvt.api import get_recent_report_date +from zvt.contract import ActorType, AdjustType +from zvt.domain import StockActorSummary, Stock1dKdata +from zvt.trader import StockTrader +from zvt.utils import pd_is_not_null, is_same_date, to_pd_timestamp + + +class FollowIITrader(StockTrader): + finish_date = None + + def on_time(self, timestamp: pd.Timestamp): + recent_report_date = to_pd_timestamp(get_recent_report_date(timestamp)) + if self.finish_date and is_same_date(recent_report_date, self.finish_date): + return + filters = [StockActorSummary.actor_type == ActorType.raised_fund.value, + StockActorSummary.report_date == recent_report_date] + + if self.entity_ids: + filters = filters + [StockActorSummary.entity_id.in_(self.entity_ids)] + + df = StockActorSummary.query_data(filters=filters) + + if pd_is_not_null(df): + self.logger.info(f'{df}') + self.finish_date = recent_report_date + + long_df = df[df['change_ratio'] > 0.05] + short_df = df[df['change_ratio'] < -0.5] + try: + self.trade_the_targets(due_timestamp=timestamp, happen_timestamp=timestamp, + 
long_selected=set(long_df['entity_id'].to_list()), + short_selected=set(short_df['entity_id'].to_list())) + except Exception as e: + self.logger.error(e) + + +if __name__ == '__main__': + entity_id = 'stock_sh_600519' + Stock1dKdata.record_data(entity_id=entity_id, provider='em') + StockActorSummary.record_data(entity_id=entity_id, provider='em') + FollowIITrader(start_timestamp='2002-01-01', end_timestamp='2021-01-01', entity_ids=[entity_id], + provider='em', adjust_type=AdjustType.qfq, profit_threshold=None).run() ``` -你可以发挥想象力,社保重仓买买买,外资重仓买买买,董事长跟小姨子跑了卖卖卖...... -然后,刷新一下[http://127.0.0.1:8050/](http://127.0.0.1:8050/),看你运行策略的performance +So, writing a strategy is not that complicated. +Just use your imagination, find the relation between the price and the events. -更多可参考[策略例子](https://github.com/zvtvz/zvt/tree/master/examples/trader) +Then refresh [http://127.0.0.1:8050/](http://127.0.0.1:8050/), check the performance of your strategy. -## 5. 计算 -简单的计算可以通过query_data来完成,这里说的是系统设计的二维索引多标的计算模型。 +More examples are in [Strategy example](https://github.com/zvtvz/zvt/tree/master/examples/trader) -下面以技术因子为例对**计算流程**进行说明: +#### Be serious (formal) +Simple calculation can be done through query_data. +Now it's time to introduce the two-dimensional index multi-entity calculation model. + +Take technical factors as an example to illustrate the **calculation process**: ``` -In [7]: from zvt.factors.technical_factor import * -In [8]: factor = BullFactor(codes=['000338','601318'],start_timestamp='2019-01-01',end_timestamp='2019-06-10', transformer=MacdTransformer()) +In [7]: from zvt.factors import * +In [8]: factor = BullFactor(codes=['000338','601318'],start_timestamp='2019-01-01',end_timestamp='2019-06-10', transformer=MacdTransformer(count_live_dead=True)) ``` ### data_df -data_df为factor的原始数据,即通过query_data从数据库读取到的数据,为一个**二维索引**DataFrame + +**two-dimensional index** DataFrame read from the schema by query_data.
``` In [11]: factor.data_df Out[11]: @@ -382,8 +552,8 @@ stock_sz_000338 2019-06-03 1d 11.04 stock_sz_000338_2019-06-03 stock_sz_00 ``` ### factor_df -factor_df为transformer对data_df进行计算后得到的数据,设计因子即对[transformer](https://github.com/zvtvz/zvt/blob/master/zvt/factors/factor.py#L18)进行扩展,例子中用的是MacdTransformer()。 - +**two-dimensional index** DataFrame which calculating using data_df by [transformer](https://github.com/zvtvz/zvt/blob/master/src/zvt/contract/factor.py#L34) +e.g., MacdTransformer. ``` In [12]: factor.factor_df Out[12]: @@ -405,13 +575,15 @@ stock_sz_000338 2019-06-03 1d 11.04 stock_sz_000338_2019-06-03 stock_sz_00 ``` ### result_df -result_df为可用于选股器的**二维索引**DataFrame,通过对data_df或factor_df计算来实现。 -该例子在计算macd之后,利用factor_df,黄白线在0轴上为True,否则为False,[具体代码](https://github.com/zvtvz/zvt/blob/master/zvt/factors/technical_factor.py#L56) +**two-dimensional index** DataFrame which calculating using factor_df or(and) data_df. +It's used by TargetSelector. + +e.g.,[macd](https://github.com/zvtvz/zvt/blob/master/src/zvt/factors/technical_factor.py#L56) ``` In [14]: factor.result_df Out[14]: - score + filter_result entity_id timestamp stock_sh_601318 2019-01-02 False 2019-01-03 False @@ -428,79 +600,102 @@ stock_sz_000338 2019-06-03 False [208 rows x 1 columns] ``` -不同类型Factor的result_df格式如下: +The format of result_df is as follows: -* filter类型 -

+

-* score类型 -

+filter_result is True or False, score_result is from 0 to 1 -结合选股器和回测,整个流程如下: +Combining the stock picker and backtesting, the whole process is as follows:

-## 5. 🚀开发 +## Env settings(optional) +``` +>>> from zvt import * +>>> zvt_env +{'zvt_home': '/Users/foolcage/zvt-home', + 'data_path': '/Users/foolcage/zvt-home/data', + 'tmp_path': '/Users/foolcage/zvt-home/tmp', + 'ui_path': '/Users/foolcage/zvt-home/ui', + 'log_path': '/Users/foolcage/zvt-home/logs'} -### 5.1 clone代码 +>>> zvt_config +``` + +* jq_username 聚宽数据用户名 +* jq_password 聚宽数据密码 +* smtp_host 邮件服务器host +* smtp_port 邮件服务器端口 +* email_username smtp邮箱账户 +* email_password smtp邮箱密码 +* wechat_app_id +* wechat_app_secrect + +``` +>>> init_config(current_config=zvt_config, jq_username='xxx', jq_password='yyy') +``` +> config others this way: init_config(current_config=zvt_config, **kv) + +### History data + +ZVT supports incremental data updates, and sharing historical data among users is encouraged for time-saving efficiency + +#### Data providers +> The new UI's real-time quotes are based on the QMT data source. To obtain access, please contact the author. + +the data could be updated from different provider, this make the system stable. + +> add other providers, [Data extension tutorial](https://zvtvz.github.io/zvt/#/data_extending) + +## Development + +### Clone ``` git clone https://github.com/zvtvz/zvt.git ``` -设置项目的virtual env(python>=3.6),安装依赖 +set up virtual env(python>=3.8),install requirements ``` pip3 install -r requirements.txt pip3 install pytest ``` -### 5.2 测试案例 -pycharm导入工程(推荐,你也可以使用其他ide),然后pytest跑测试案例 +### Tests +```shell +pytest ./tests --ignore=tests/recorders/ +```

-大部分功能使用都可以从tests里面参考 - -## ✨ 特性 -- **丰富全面开箱即用可扩展可持续增量更新的数据** - - A股数据:行情,财务报表,大股东行为,高管交易,分红融资详情,个股板块资金流向,融资融券,龙虎榜等数据 - - 市场整体pe,pb,资金流,融资融券,外资动向等数据 - - 数字货币数据 -- 数据的标准化,多数据源(provider)交叉验证,补全 -- **简洁可扩展的数据框架** -- **统一简洁的API,支持sql查询,支持pandas** -- 可扩展的factor,对单标的和多标的的运算抽象了一种统一的计算方式 -- **支持多标的,多factor,多级别的回测方式** -- 支持交易信号和策略使用到的factor的实时可视化 -- 支持多种实盘交易(实现中) +Most of the features can be referenced from the tests -## 💡贡献 -期待能有更多的开发者参与到 zvt 的开发中来,我会保证尽快 Reivew PR 并且及时回复。但提交 PR 请确保 +## Contribution -先看一下[1分钟代码规范](https://github.com/zvtvz/zvt/blob/master/code_of_conduct.md) +[code of conduct](https://github.com/zvtvz/zvt/blob/master/code_of_conduct.md) -1. 通过所有单元测试,如若是新功能,请为其新增单元测试 -2. 遵守开发规范 -3. 如若需要,请更新相对应的文档 +1. Pass all unit tests, if it is a new feature, please add a new unit test for it +2. Compliance with development specifications +3. If necessary, please update the corresponding document -也非常欢迎开发者能为 zvt 提供更多的示例,共同来完善文档。 +Developers are also very welcome to provide more examples for zvt, and work together to improve the documentation. -## 💌请作者喝杯咖啡 +## Buy me a coffee -如果你觉得项目对你有帮助,可以请作者喝杯咖啡 Alipay      Wechat -## 🤝联系方式 +## Contact -加微信进群:foolcage 添加暗号:zvt +wechat:foolcage Wechat ------ -微信公众号: +wechat subscription: Wechat -知乎专栏: +zhihu: https://zhuanlan.zhihu.com/automoney ## Thanks -

jetbrains

\ No newline at end of file +

jetbrains

diff --git a/api-tests/create_stock_pool_info.http b/api-tests/create_stock_pool_info.http new file mode 100644 index 00000000..1c43a8de --- /dev/null +++ b/api-tests/create_stock_pool_info.http @@ -0,0 +1,8 @@ +POST http://127.0.0.1:8090/api/work/create_stock_pool_info +accept: application/json +Content-Type: application/json + +{ + "stock_pool_name": "核心资产", + "stock_pool_type": "custom" +} \ No newline at end of file diff --git a/api-tests/create_stock_pools.http b/api-tests/create_stock_pools.http new file mode 100644 index 00000000..39dc53f2 --- /dev/null +++ b/api-tests/create_stock_pools.http @@ -0,0 +1,10 @@ +POST http://127.0.0.1:8090/api/work/create_stock_pools +accept: application/json +Content-Type: application/json + +{ + "stock_pool_name": "核心资产", + "entity_ids": [ + "stock_sh_600519" + ] +} \ No newline at end of file diff --git a/api-tests/event/create_stock_topic.http b/api-tests/event/create_stock_topic.http new file mode 100644 index 00000000..ea127104 --- /dev/null +++ b/api-tests/event/create_stock_topic.http @@ -0,0 +1,19 @@ +POST http://127.0.0.1:8090/api/event/create_stock_topic +accept: application/json +Content-Type: application/json + +{ + "name": "特斯拉FSD入华", + "desc": "Tesla Al在社交媒体平台“X”上发帖称,特斯拉计划明年第一季度在中国和欧洲推出被其称为“全自动驾驶”(Full Self Driving)的高级驾驶辅助系统,目前正在等待监管部门的批准。", + "created_timestamp": "2024-09-05 09:00:00", + "trigger_date": "2024-09-05", + "due_date": "2025-01-01", + "main_tag": "车路云", + "sub_tag_list": [ + "自动驾驶", + "车路云" + ] +} + + + diff --git a/api-tests/event/get_stock_event.http b/api-tests/event/get_stock_event.http new file mode 100644 index 00000000..9c0bcf4f --- /dev/null +++ b/api-tests/event/get_stock_event.http @@ -0,0 +1,5 @@ +GET http://127.0.0.1:8090/api/event/get_stock_event?entity_id=stock_sz_000034 +accept: application/json + + + diff --git a/api-tests/event/get_stock_news_analysis.http b/api-tests/event/get_stock_news_analysis.http new file mode 100644 index 00000000..ae2dc136 --- /dev/null +++ 
b/api-tests/event/get_stock_news_analysis.http @@ -0,0 +1,5 @@ +GET http://127.0.0.1:8090/api/event/get_stock_news_analysis +accept: application/json + + + diff --git a/api-tests/event/get_tag_suggestions_stats.http b/api-tests/event/get_tag_suggestions_stats.http new file mode 100644 index 00000000..31bb40b5 --- /dev/null +++ b/api-tests/event/get_tag_suggestions_stats.http @@ -0,0 +1,5 @@ +GET http://127.0.0.1:8090/api/event/get_tag_suggestions_stats +accept: application/json + + + diff --git a/api-tests/event/ignore_stock_news.http b/api-tests/event/ignore_stock_news.http new file mode 100644 index 00000000..0e533764 --- /dev/null +++ b/api-tests/event/ignore_stock_news.http @@ -0,0 +1,10 @@ +POST http://127.0.0.1:8090/api/event/ignore_stock_news +accept: application/json +Content-Type: application/json + +{ + "news_id": "stock_sz_000034_2024-07-17 16:08:17" +} + + + diff --git a/api-tests/event/query_stock_topic.http b/api-tests/event/query_stock_topic.http new file mode 100644 index 00000000..045d0d3f --- /dev/null +++ b/api-tests/event/query_stock_topic.http @@ -0,0 +1,10 @@ +POST http://127.0.0.1:8090/api/event/query_stock_topic +accept: application/json +Content-Type: application/json + +{ + "limit": 20 +} + + + diff --git a/api-tests/event/update_stock_topic.http b/api-tests/event/update_stock_topic.http new file mode 100644 index 00000000..7046d462 --- /dev/null +++ b/api-tests/event/update_stock_topic.http @@ -0,0 +1,18 @@ +POST http://127.0.0.1:8090/api/event/update_stock_topic +accept: application/json +Content-Type: application/json + +{ + "id": "admin_特斯拉FSD入华", + "desc": "Tesla Al在社交媒体平台“X”上发帖称,特斯拉计划明年第一季度在中国和欧洲推出被其称为“全自动驾驶”(Full Self Driving)的高级驾驶辅助系统,目前正在等待监管部门的批准。", + "created_timestamp": "2024-09-05 09:00:00", + "trigger_date": "2024-09-05", + "due_date": "2025-01-01", + "main_tag": "车路云", + "sub_tag_list": [ + "自动驾驶", + "车路云" + ] +} + + diff --git a/api-tests/factor/get_factors.http b/api-tests/factor/get_factors.http new file mode 100644 index 
00000000..36d5791e --- /dev/null +++ b/api-tests/factor/get_factors.http @@ -0,0 +1,5 @@ +GET http://127.0.0.1:8090/api/factor/get_factors +accept: application/json + + + diff --git a/api-tests/factor/query_factor_result.http b/api-tests/factor/query_factor_result.http new file mode 100644 index 00000000..55367c53 --- /dev/null +++ b/api-tests/factor/query_factor_result.http @@ -0,0 +1,11 @@ +POST http://127.0.0.1:8090/api/factor/query_factor_result +accept: application/json +Content-Type: application/json + + +{ + "entity_ids": [ + "stock_sz_300133" + ], + "factor_name": "LiveOrDeadFactor" +} diff --git a/api-tests/get_stock_pool_info.http b/api-tests/get_stock_pool_info.http new file mode 100644 index 00000000..ad78231c --- /dev/null +++ b/api-tests/get_stock_pool_info.http @@ -0,0 +1,5 @@ +GET http://127.0.0.1:8090/api/work/get_stock_pool_info +accept: application/json + + + diff --git a/api-tests/get_stock_pools.http b/api-tests/get_stock_pools.http new file mode 100644 index 00000000..0c6f1c15 --- /dev/null +++ b/api-tests/get_stock_pools.http @@ -0,0 +1,5 @@ +GET http://127.0.0.1:8090/api/work/get_stock_pools?stock_pool_name=main_line +accept: application/json + + + diff --git a/api-tests/tag/batch_set_stock_tags.http b/api-tests/tag/batch_set_stock_tags.http new file mode 100644 index 00000000..a74c2cc3 --- /dev/null +++ b/api-tests/tag/batch_set_stock_tags.http @@ -0,0 +1,18 @@ +POST http://127.0.0.1:8090/api/work/batch_set_stock_tags +accept: application/json +Content-Type: application/json + + +{ + "tag": "脑机接口", + "tag_type": "sub_tag", + "entity_ids": [ + "stock_sh_600775", + "stock_sz_002173", + "stock_sz_301293", + "stock_sz_300753", + "stock_sz_300430", + "stock_sz_002243" + ], + "tag_reason": "脑机接口消息刺激" +} diff --git a/api-tests/tag/build_main_tag_industry_relation.http b/api-tests/tag/build_main_tag_industry_relation.http new file mode 100644 index 00000000..ff521d0e --- /dev/null +++ b/api-tests/tag/build_main_tag_industry_relation.http @@ -0,0 
+1,8 @@ +POST http://127.0.0.1:8090/api/work/build_main_tag_industry_relation +accept: application/json +Content-Type: application/json + +{ + "main_tag": "华为", + "industry_list": ["通信设备"] +} \ No newline at end of file diff --git a/api-tests/tag/build_main_tag_sub_tag_relation.http b/api-tests/tag/build_main_tag_sub_tag_relation.http new file mode 100644 index 00000000..5c9da65c --- /dev/null +++ b/api-tests/tag/build_main_tag_sub_tag_relation.http @@ -0,0 +1,13 @@ +POST http://127.0.0.1:8090/api/work/build_main_tag_sub_tag_relation +accept: application/json +Content-Type: application/json + +{ + "main_tag": "华为", + "sub_tag_list": [ + "华为汽车", + "华为概念", + "华为欧拉", + "华为昇腾" + ] +} \ No newline at end of file diff --git a/api-tests/tag/build_stock_tags.http b/api-tests/tag/build_stock_tags.http new file mode 100644 index 00000000..ba957231 --- /dev/null +++ b/api-tests/tag/build_stock_tags.http @@ -0,0 +1,15 @@ +POST http://127.0.0.1:8090/api/work/build_stock_tags +accept: application/json +Content-Type: application/json + +[ + { + "entity_id": "stock_sz_002085", + "name": "万丰奥威", + "main_tag": "低空经济", + "main_tag_reason": "2023年12月27日回复称,公司钻石eDA40纯电动飞机已成功首飞;eVTOL项目已联动海外钻石技术开发团队,在绿色、智能、垂直起降等方面的设计体现未来领域应用场景。", + "sub_tag": "低空经济", + "sub_tag_reason": "2023年12月27日回复称,公司钻石eDA40纯电动飞机已成功首飞;eVTOL项目已联动海外钻石技术开发团队,在绿色、智能、垂直起降等方面的设计体现未来领域应用场景。", + "active_hidden_tags": null + } +] \ No newline at end of file diff --git a/api-tests/tag/change_main_tag.http b/api-tests/tag/change_main_tag.http new file mode 100644 index 00000000..48cb614e --- /dev/null +++ b/api-tests/tag/change_main_tag.http @@ -0,0 +1,8 @@ +POST http://127.0.0.1:8090/api/work/change_main_tag +accept: application/json +Content-Type: application/json + +{ + "current_main_tag": "医疗器械", + "new_main_tag": "医药" +} \ No newline at end of file diff --git a/api-tests/tag/create_hidden_tag_info.http b/api-tests/tag/create_hidden_tag_info.http new file mode 100644 index 00000000..d5c65844 --- /dev/null +++ 
b/api-tests/tag/create_hidden_tag_info.http @@ -0,0 +1,8 @@ +POST http://127.0.0.1:8090/api/work/create_hidden_tag_info +accept: application/json +Content-Type: application/json + +{ + "tag": "中字头", + "tag_reason": "央企,国资委控股" +} \ No newline at end of file diff --git a/api-tests/tag/create_main_tag_info.http b/api-tests/tag/create_main_tag_info.http new file mode 100644 index 00000000..7caa64ba --- /dev/null +++ b/api-tests/tag/create_main_tag_info.http @@ -0,0 +1,8 @@ +POST http://127.0.0.1:8090/api/work/create_main_tag_info +accept: application/json +Content-Type: application/json + +{ + "tag": "未知", + "tag_reason": "行业定位不清晰" +} \ No newline at end of file diff --git a/api-tests/tag/create_sub_tag_info.http b/api-tests/tag/create_sub_tag_info.http new file mode 100644 index 00000000..3183a3ff --- /dev/null +++ b/api-tests/tag/create_sub_tag_info.http @@ -0,0 +1,8 @@ +POST http://127.0.0.1:8090/api/work/create_sub_tag_info +accept: application/json +Content-Type: application/json + +{ + "tag": "低空经济", + "tag_reason": "低空经济是飞行器和各种产业形态的融合,例如\"无人机+配送\"、\"直升机或evto载人飞行\"、\"无人机+应急救援\"、\"无人机+工业场景巡检\"等" +} \ No newline at end of file diff --git a/api-tests/tag/get_hidden_tag_info.http b/api-tests/tag/get_hidden_tag_info.http new file mode 100644 index 00000000..007750a1 --- /dev/null +++ b/api-tests/tag/get_hidden_tag_info.http @@ -0,0 +1,4 @@ +GET http://127.0.0.1:8090/api/work/get_hidden_tag_info +accept: application/json + + diff --git a/api-tests/tag/get_industry_info.http b/api-tests/tag/get_industry_info.http new file mode 100644 index 00000000..c1fac747 --- /dev/null +++ b/api-tests/tag/get_industry_info.http @@ -0,0 +1,4 @@ +GET http://127.0.0.1:8090/api/work/get_industry_info +accept: application/json + + diff --git a/api-tests/tag/get_main_tag_industry_relation.http b/api-tests/tag/get_main_tag_industry_relation.http new file mode 100644 index 00000000..7b44ce9b --- /dev/null +++ b/api-tests/tag/get_main_tag_industry_relation.http @@ -0,0 +1,4 @@ +GET 
http://127.0.0.1:8090/api/work/get_main_tag_industry_relation?main_tag=华为 +accept: application/json + + diff --git a/api-tests/tag/get_main_tag_info.http b/api-tests/tag/get_main_tag_info.http new file mode 100644 index 00000000..dc1d8387 --- /dev/null +++ b/api-tests/tag/get_main_tag_info.http @@ -0,0 +1,4 @@ +GET http://127.0.0.1:8090/api/work/get_main_tag_info +accept: application/json + + diff --git a/api-tests/tag/get_main_tag_sub_tag_relation.http b/api-tests/tag/get_main_tag_sub_tag_relation.http new file mode 100644 index 00000000..a4886faa --- /dev/null +++ b/api-tests/tag/get_main_tag_sub_tag_relation.http @@ -0,0 +1,4 @@ +GET http://127.0.0.1:8090/api/work/get_main_tag_sub_tag_relation?main_tag=华为 +accept: application/json + + diff --git a/api-tests/tag/get_stock_tag_options.http b/api-tests/tag/get_stock_tag_options.http new file mode 100644 index 00000000..4b567d0d --- /dev/null +++ b/api-tests/tag/get_stock_tag_options.http @@ -0,0 +1,4 @@ +GET http://127.0.0.1:8090/api/work/get_stock_tag_options?entity_id=stock_sh_600733 +accept: application/json + + diff --git a/api-tests/tag/get_sub_tag_info.http b/api-tests/tag/get_sub_tag_info.http new file mode 100644 index 00000000..58329c1e --- /dev/null +++ b/api-tests/tag/get_sub_tag_info.http @@ -0,0 +1,4 @@ +GET http://127.0.0.1:8090/api/work/get_sub_tag_info +accept: application/json + + diff --git a/api-tests/tag/query_simple_stock_tags.http b/api-tests/tag/query_simple_stock_tags.http new file mode 100644 index 00000000..7ec4c6f3 --- /dev/null +++ b/api-tests/tag/query_simple_stock_tags.http @@ -0,0 +1,12 @@ +POST http://127.0.0.1:8090/api/work/query_simple_stock_tags +accept: application/json +Content-Type: application/json + + +{ + "entity_ids": [ + "stock_sz_002085", + "stock_sz_000099", + "stock_sz_002130" + ] +} \ No newline at end of file diff --git a/api-tests/tag/query_stock_tag_stats.http b/api-tests/tag/query_stock_tag_stats.http new file mode 100644 index 00000000..20f9b59b --- /dev/null +++ 
b/api-tests/tag/query_stock_tag_stats.http @@ -0,0 +1,7 @@ +POST http://127.0.0.1:8090/api/work/query_stock_tag_stats +accept: application/json +Content-Type: application/json + +{ + "stock_pool_name": "main_line" +} \ No newline at end of file diff --git a/api-tests/tag/query_stock_tags.http b/api-tests/tag/query_stock_tags.http new file mode 100644 index 00000000..a1e655d5 --- /dev/null +++ b/api-tests/tag/query_stock_tags.http @@ -0,0 +1,12 @@ +POST http://127.0.0.1:8090/api/work/query_stock_tags +accept: application/json +Content-Type: application/json + + +{ + "entity_ids": [ + "stock_sz_000099", + "stock_sz_002085", + "stock_sz_001696" + ] +} diff --git a/api-tests/tag/set_stock_tags.http b/api-tests/tag/set_stock_tags.http new file mode 100644 index 00000000..39774a3d --- /dev/null +++ b/api-tests/tag/set_stock_tags.http @@ -0,0 +1,17 @@ +POST http://127.0.0.1:8090/api/work/set_stock_tags +accept: application/json +Content-Type: application/json + +{ + "entity_id": "stock_sz_001366", + "name": "播恩集团", + "main_tag": "医药", + "main_tag_reason": "合成生物概念", + "main_tags": { + "农业": "来自行业:农牧饲渔" + }, + "sub_tag": "生物医药", + "sub_tag_reason": "合成生物概念", + "sub_tags": null, + "active_hidden_tags": null + } \ No newline at end of file diff --git a/api-tests/test.http b/api-tests/test.http new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/api-tests/test.http @@ -0,0 +1 @@ + diff --git a/api-tests/trading/build_query_stock_quote_setting.http b/api-tests/trading/build_query_stock_quote_setting.http new file mode 100644 index 00000000..5ef65fc1 --- /dev/null +++ b/api-tests/trading/build_query_stock_quote_setting.http @@ -0,0 +1,8 @@ +POST http://127.0.0.1:8090/api/trading/build_query_stock_quote_setting +accept: application/json +Content-Type: application/json + +{ + "stock_pool_name": "main_line", + "main_tags": ["低空经济","新能源"] +} \ No newline at end of file diff --git a/api-tests/trading/build_trading_plan.http b/api-tests/trading/build_trading_plan.http 
new file mode 100644 index 00000000..668df7c5 --- /dev/null +++ b/api-tests/trading/build_trading_plan.http @@ -0,0 +1,13 @@ +POST http://127.0.0.1:8090/api/trading/build_trading_plan +accept: application/json +Content-Type: application/json + +{ + "stock_id": "stock_sz_300133", + "trading_date": "2024-04-23", + "expected_open_pct": 0.02, + "buy_price": 6.9, + "sell_price": null, + "trading_reason": "主线", + "trading_signal_type": "open_long" +} diff --git a/api-tests/trading/get_current_trading_plan.http b/api-tests/trading/get_current_trading_plan.http new file mode 100644 index 00000000..12b434b4 --- /dev/null +++ b/api-tests/trading/get_current_trading_plan.http @@ -0,0 +1,5 @@ +GET http://127.0.0.1:8090/api/trading/get_current_trading_plan +accept: application/json + + + diff --git a/api-tests/trading/get_future_trading_plan.http b/api-tests/trading/get_future_trading_plan.http new file mode 100644 index 00000000..bdda5385 --- /dev/null +++ b/api-tests/trading/get_future_trading_plan.http @@ -0,0 +1,5 @@ +GET http://127.0.0.1:8090/api/trading/get_future_trading_plan +accept: application/json + + + diff --git a/api-tests/trading/get_query_stock_quote_setting.http b/api-tests/trading/get_query_stock_quote_setting.http new file mode 100644 index 00000000..8f4ec923 --- /dev/null +++ b/api-tests/trading/get_query_stock_quote_setting.http @@ -0,0 +1,5 @@ +GET http://127.0.0.1:8090/api/trading/get_query_stock_quote_setting +accept: application/json + + + diff --git a/api-tests/trading/get_quote_stats.http b/api-tests/trading/get_quote_stats.http new file mode 100644 index 00000000..abad3cbb --- /dev/null +++ b/api-tests/trading/get_quote_stats.http @@ -0,0 +1,2 @@ +GET http://127.0.0.1:8090/api/trading/get_quote_stats +accept: application/json diff --git a/api-tests/trading/query_kdata.http b/api-tests/trading/query_kdata.http new file mode 100644 index 00000000..9bf63c61 --- /dev/null +++ b/api-tests/trading/query_kdata.http @@ -0,0 +1,13 @@ +POST 
http://127.0.0.1:8090/api/trading/query_kdata +accept: application/json +Content-Type: application/json + + +{ + "data_provider": "em", + "entity_ids": [ + "stock_sz_002085", + "stock_sz_300133" + ], + "adjust_type": "hfq" +} diff --git a/api-tests/trading/query_stock_quotes.http b/api-tests/trading/query_stock_quotes.http new file mode 100644 index 00000000..5805111a --- /dev/null +++ b/api-tests/trading/query_stock_quotes.http @@ -0,0 +1,10 @@ +POST http://127.0.0.1:8090/api/trading/query_stock_quotes +accept: application/json +Content-Type: application/json + +{ + "main_tag": "低空经济", + "stock_pool_name": "vol_up", + "limit": 500, + "order_by_field": "change_pct" +} diff --git a/api-tests/trading/query_tag_quotes.http b/api-tests/trading/query_tag_quotes.http new file mode 100644 index 00000000..bfadce6f --- /dev/null +++ b/api-tests/trading/query_tag_quotes.http @@ -0,0 +1,13 @@ +POST http://127.0.0.1:8090/api/trading/query_tag_quotes +accept: application/json +Content-Type: application/json + +{ + "main_tags": [ + "低空经济", + "半导体", + "化工", + "消费电子" + ], + "stock_pool_name": "main_line" +} diff --git a/api-tests/trading/query_trading_plan.http b/api-tests/trading/query_trading_plan.http new file mode 100644 index 00000000..944ad194 --- /dev/null +++ b/api-tests/trading/query_trading_plan.http @@ -0,0 +1,12 @@ +POST http://127.0.0.1:8090/api/trading/query_trading_plan +accept: application/json +Content-Type: application/json + +{ + "time_range": { + "relative_time_range": { + "interval": -30, + "time_unit": "day" + } + } +} diff --git a/api-tests/trading/query_ts.http b/api-tests/trading/query_ts.http new file mode 100644 index 00000000..f91f7d6e --- /dev/null +++ b/api-tests/trading/query_ts.http @@ -0,0 +1,11 @@ +POST http://127.0.0.1:8090/api/trading/query_ts +accept: application/json +Content-Type: application/json + + +{ + "entity_ids": [ + "stock_sz_002085", + "stock_sz_300133" + ] +} diff --git a/build.sh b/build.sh new file mode 100644 index 
00000000..c6ac05d8 --- /dev/null +++ b/build.sh @@ -0,0 +1 @@ +python3 setup.py sdist bdist_wheel \ No newline at end of file diff --git a/docs/.nojekyll b/docs/.nojekyll deleted file mode 100644 index e69de29b..00000000 diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..d0c3cbf1 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/_navbar.md b/docs/_navbar.md deleted file mode 100644 index 8bab00f4..00000000 --- a/docs/_navbar.md +++ /dev/null @@ -1,4 +0,0 @@ - - -* [中文](/) -* [english](/en/) \ No newline at end of file diff --git a/docs/_sidebar.md b/docs/_sidebar.md deleted file mode 100644 index 89e88935..00000000 --- a/docs/_sidebar.md +++ /dev/null @@ -1,14 +0,0 @@ -- 入坑 - - [简介](intro.md "zvt intro") - - [快速开始](quick-start.md "zvt quick start") -- 数据 - - [数据总览](data_overview.md "zvt data overview") - - [数据列表](data_list.md "zvt data list") - - [数据更新](data_recorder.md "zvt data recorder") - - [数据使用](data_usage.md "zvt data usage") - - [数据扩展](data_extending.md "zvt data extending") -- 计算 - - [因子计算](factor.md "zvt factor") - - [回测通知](trader.md "zvt trader") -- [设计哲学](design-philosophy.md "zvt design philosophy") -- [支持项目](donate.md "donate for zvt") diff --git a/docs/arch.png b/docs/arch.png deleted file mode 100644 index fcb3e80a..00000000 Binary files a/docs/arch.png and /dev/null 
differ diff --git a/docs/architecture.eddx b/docs/architecture.eddx deleted file mode 100644 index 8b2dedf6..00000000 Binary files a/docs/architecture.eddx and /dev/null differ diff --git a/docs/architecture.png b/docs/architecture.png deleted file mode 100644 index ccdfe9b8..00000000 Binary files a/docs/architecture.png and /dev/null differ diff --git a/docs/data.eddx b/docs/data.eddx deleted file mode 100644 index 71a125f8..00000000 Binary files a/docs/data.eddx and /dev/null differ diff --git a/docs/data.png b/docs/data.png deleted file mode 100644 index 71fa714d..00000000 Binary files a/docs/data.png and /dev/null differ diff --git a/docs/data_extending.md b/docs/data_extending.md deleted file mode 100644 index 51c22ed7..00000000 --- a/docs/data_extending.md +++ /dev/null @@ -1,266 +0,0 @@ -## 数据扩展要点 - -* zvt里面只有两种数据,EntityMixin和Mixin - - EntityMixin为投资标的信息,Mixin为其发生的事。任何一个投资品种,首先是定义EntityMixin,然后是其相关的Mixin。 - 比如Stock(EntityMixin),及其相关的BalanceSheet,CashFlowStatement(Mixin)等。 - -* zvt的数据可以记录(record_data方法) - - 记录数据可以通过扩展以下类来实现: - - * Recorder - - 最基本的类,实现了关联data_schema和recorder的功能。记录EntityMixin一般继承该类。 - - * RecorderForEntities - - 实现了初始化需要记录的**投资标的列表**的功能,有了标的,才能记录标的发生的事。 - - * TimeSeriesDataRecorder - - 实现了增量记录,实时和非实时数据处理的功能。 - - * FixedCycleDataRecorder - - 实现了计算固定周期数据剩余size的功能。 - - * TimestampsDataRecorder - - 实现记录时间集合可知的数据记录功能。 - -继承Recorder必须指定data_schema和provider两个字段,系统通过python meta programing的方式对data_schema和recorder class进行了关联: -``` -class Meta(type): - def __new__(meta, name, bases, class_dict): - cls = type.__new__(meta, name, bases, class_dict) - # register the recorder class to the data_schema - if hasattr(cls, 'data_schema') and hasattr(cls, 'provider'): - if cls.data_schema and issubclass(cls.data_schema, Mixin): - print(f'{cls.__name__}:{cls.data_schema.__name__}') - cls.data_schema.register_recorder_cls(cls.provider, cls) - return cls - - -class Recorder(metaclass=Meta): - logger = logging.getLogger(__name__) - - # overwrite them to setup the data 
you want to record - provider: str = None - data_schema: Mixin = None -``` - - -下面以**个股估值数据**为例对具体步骤做一个说明。 - -## 1. 定义数据 -在domain package(或者其子package)下新建一个文件(module)valuation.py,内容如下: -``` -# -*- coding: utf-8 -*- -from sqlalchemy import Column, String, Float -from sqlalchemy.ext.declarative import declarative_base - -from zvdata import Mixin -from zvdata.contract import register_schema - -ValuationBase = declarative_base() - - -class StockValuation(ValuationBase, Mixin): - __tablename__ = 'stock_valuation' - - code = Column(String(length=32)) - name = Column(String(length=32)) - # 总股本(股) - capitalization = Column(Float) - # 公司已发行的普通股股份总数(包含A股,B股和H股的总股本) - circulating_cap = Column(Float) - # 市值 - market_cap = Column(Float) - # 流通市值 - circulating_market_cap = Column(Float) - # 换手率 - turnover_ratio = Column(Float) - # 静态pe - pe = Column(Float) - # 动态pe - pe_ttm = Column(Float) - # 市净率 - pb = Column(Float) - # 市销率 - ps = Column(Float) - # 市现率 - pcf = Column(Float) - - -class EtfValuation(ValuationBase, Mixin): - __tablename__ = 'etf_valuation' - - code = Column(String(length=32)) - name = Column(String(length=32)) - # 静态pe - pe = Column(Float) - # 动态pe - pe_ttm = Column(Float) - # 市净率 - pb = Column(Float) - # 市销率 - ps = Column(Float) - # 市现率 - pcf = Column(Float) - - -register_schema(providers=['joinquant'], db_name='valuation', schema_base=ValuationBase) - -__all__ = ['StockValuation', 'EtfValuation'] - -``` -将其分解为以下步骤: -### 1.1 数据库base -``` -ValuationBase = declarative_base() -``` -一个数据库可有多个table(schema),table(schema)应继承自该类 - -### 1.2 table(schema)的定义 -``` -class StockValuation(ValuationBase, Mixin): - __tablename__ = 'stock_valuation' - - code = Column(String(length=32)) - name = Column(String(length=32)) - # 总股本(股) - capitalization = Column(Float) - # 公司已发行的普通股股份总数(包含A股,B股和H股的总股本) - circulating_cap = Column(Float) - # 市值 - market_cap = Column(Float) - # 流通市值 - circulating_market_cap = Column(Float) - # 换手率 - turnover_ratio = Column(Float) - # 静态pe - pe = 
Column(Float) - # 动态pe - pe_ttm = Column(Float) - # 市净率 - pb = Column(Float) - # 市销率 - ps = Column(Float) - # 市现率 - pcf = Column(Float) - - -class EtfValuation(ValuationBase, Mixin): - __tablename__ = 'etf_valuation' - - code = Column(String(length=32)) - name = Column(String(length=32)) - # 静态pe - pe = Column(Float) - # 动态pe - pe_ttm = Column(Float) - # 市净率 - pb = Column(Float) - # 市销率 - ps = Column(Float) - # 市现率 - pcf = Column(Float) -``` -这里定义了两个table(schema),继承ValuationBase表明其隶属的数据库,继承Mixin让其获得zvt统一的字段和方法。 -schema里面的__tablename__为表名。 -### 1.3 注册数据 -``` -register_schema(providers=['joinquant'], db_name='valuation', schema_base=ValuationBase) - -__all__ = ['StockValuation', 'EtfValuation'] -``` -register_schema会将数据注册到zvt的数据系统中,providers为数据的提供商列表,db_name为数据库名字标识,schema_base为上面定义的数据库base。 - -__all__为该module定义的数据结构,为了使得整个系统的数据依赖干净明确,所有的module都应该手动定义该字段。 - - -## 2 实现相应的recorder -``` -# -*- coding: utf-8 -*- - -import pandas as pd -from jqdatasdk import auth, logout, query, valuation, get_fundamentals_continuously - -from zvdata.api import df_to_db -from zvdata.recorder import TimeSeriesDataRecorder -from zvdata.utils.time_utils import now_pd_timestamp, now_time_str, to_time_str -from zvt import zvt_config -from zvt.domain import Stock, StockValuation, EtfStock -from zvt.recorders.joinquant.common import to_jq_entity_id - - -class JqChinaStockValuationRecorder(TimeSeriesDataRecorder): - entity_provider = 'joinquant' - entity_schema = Stock - - # 数据来自jq - provider = 'joinquant' - - data_schema = StockValuation - - def __init__(self, entity_type='stock', exchanges=['sh', 'sz'], entity_ids=None, codes=None, day_data=False, batch_size=10, - force_update=False, sleeping_time=5, default_size=2000, real_time=False, fix_duplicate_way='add', - start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0) -> None: - super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, 
fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute) - auth(zvt_config['jq_username'], zvt_config['jq_password']) - - def on_finish(self): - super().on_finish() - logout() - - def record(self, entity, start, end, size, timestamps): - q = query( - valuation - ).filter( - valuation.code == to_jq_entity_id(entity) - ) - count: pd.Timedelta = now_pd_timestamp() - start - df = get_fundamentals_continuously(q, end_date=now_time_str(), count=count.days + 1, panel=False) - df['entity_id'] = entity.id - df['timestamp'] = pd.to_datetime(df['day']) - df['code'] = entity.code - df['name'] = entity.name - df['id'] = df['timestamp'].apply(lambda x: "{}_{}".format(entity.id, to_time_str(x))) - df = df.rename({'pe_ratio_lyr': 'pe', - 'pe_ratio': 'pe_ttm', - 'pb_ratio': 'pb', - 'ps_ratio': 'ps', - 'pcf_ratio': 'pcf'}, - axis='columns') - - df['market_cap'] = df['market_cap'] * 100000000 - df['circulating_cap'] = df['circulating_cap'] * 100000000 - df['capitalization'] = df['capitalization'] * 10000 - df['circulating_cap'] = df['circulating_cap'] * 10000 - df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) - - return None - -__all__ = ['JqChinaStockValuationRecorder'] -``` - -# 3. 获得的能力 - -# 4. recorder原理 -将各provider提供(或者自己爬取)的数据**变成**符合data schema的数据需要做好以下几点: -* 初始化要抓取的标的 -可抓取单标的来调试,然后抓取全量标的 -* 能够从上次抓取的地方接着抓 -减少不必要的请求,增量抓取 -* 封装常用的请求方式 -对时间序列数据的请求,无非start,end,size,time list的组合 -* 能够自动去重 -* 能够设置抓取速率 -* 提供抓取完成的回调函数 -方便数据校验和多provider数据补全 - -流程图如下: -

\ No newline at end of file diff --git a/docs/data_list.md b/docs/data_list.md deleted file mode 100644 index 57466be8..00000000 --- a/docs/data_list.md +++ /dev/null @@ -1,53 +0,0 @@ -## 支持的数据 -## TODO:数据上传 -``` -from zvt.domain import * -``` - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
dbschema更新方法下载地址
stock_meta
StockStock.record_data(provider='joinquant')
Stock.record_data(provider='eastmoney')
wait...
BlockBlock.record_data(provider='sina')
Block.record_data(provider='eastmoney')
BlockStockBlockStock.record_data(provider='sina')
BlockStock.record_data(provider='eastmoney')
EtfEtf.record_data(provider='joinquant')
EtfStockEtfStock.record_data(provider='joinquant')
IndexIndex.record_data
IndexStockIndexStock.record_data
StockDetailStockDetail.record_data(provider='eastmoney')
\ No newline at end of file diff --git a/docs/data_overview.md b/docs/data_overview.md deleted file mode 100644 index aee77270..00000000 --- a/docs/data_overview.md +++ /dev/null @@ -1,284 +0,0 @@ -## 1. 数据是什么? - -没有数据,量化便成了空中楼阁。 - -那么,在量化中,数据到底是什么? - -zvt对量化数据进行了简洁统一的抽象:数据就是 **投资标的** 在**某时间点(段)** 所**发生的事情**的描述。 - -其中,投资标的,叫**entity**;时间点(段),叫**timestamp**;事情的描述根据事情的不同而具有不同的**属性**。 - -整体结构如下: -

- -### 1.1 投资标的(entity) - -首先,我们得有投资标的。 - -而在整个市场里,投资标的一定会有三个属性: - -* **证券类型(entity_type)** - -股票(stock),债券(bond),期货(future),数字货币(coin),基金(fund)等 - -* **交易所(exchange)** - -上海证券交易所(sh),深圳证券交易所(sz)等 - -* **代码(code)** - -投资标的编码,A股中的000338,601318,数字货币交易中的BTC/USDT,EOS/USDT等 - -所以,zvt里面投资标的的唯一编码(entity_id)为:{entity_type}\_{exchange}\_{code} - -entity基类定义如下: -``` -class EntityMixin(Mixin): - entity_type = Column(String(length=64)) - exchange = Column(String(length=32)) - code = Column(String(length=64)) - name = Column(String(length=128)) -``` - -### 1.2 投资标的发生的事 - -而投资标的发生的事,一定会有三个属性: -* **entity_id** - -投资标的id - -* **timestamp** - -发生时间点(段) - -* **id** - -事件的唯一编码,一般使情况下格式为:{entity_id}_{timestamp} - -entity发生的事情定义如下: -``` -class Mixin(object): - id = Column(String, primary_key=True) - entity_id = Column(String) - - # the meaning could be different for different case,most of time it means 'happen time' - timestamp = Column(DateTime) -``` - ->注意,上面EntityMixin继承了Mixin,如何理解? ->entity的诞生其实也是一个事件,这时,timestamp就代表其上市日。 - -## 2. 数据的稳定性和扩展性 -市场没有新鲜事,市场数据更没有新鲜事。 - -对市场理解越深,就越能定义出稳定的市场数据结构。 - -而对市场的理解并不是一蹴而就的,这就要求数据结构的设计必须具有可扩展性。 - -那么,什么是**稳定**并具有**可扩展性**的数据结构? - -稳定至少要达到以下的标准: -* **标准的字段** - -不管数据来源何处,**确定的语义**在系统里面必须对应**确定的字段**;净资产收益率就叫roe,每股收益就叫eps,毛利率就叫gross_profit_margin。 - -* **完全分类(正交)** - -技术面,基本面,宏观面,消息面等。 - -* **层次关系** - -原始数据和衍生(计算)数据的关系,比如k线数据和各种技术指标;财报和各种财务指标。 - -而扩展性最重要的就是,**容易添加新数据**,并使得新数据无缝融入到系统中。 - -数据定义的目录为[domain](https://github.com/zvtvz/zvt/tree/master/zvt/domain) - -## 3. 系统都有哪些数据? -``` -In [1]: from zvt.domain import * -In [2]: global_schemas -[zvt.domain.dividend_financing.DividendFinancing, - zvt.domain.dividend_financing.DividendDetail, - zvt.domain.dividend_financing.SpoDetail...] 
-``` - -global_schemas就是系统支持的所有数据,具体含义可以查看相应字段的注释,或者调用相应schema的help方法: -``` -In [3]: DividendFinancing.help() -class DividendFinancing(DividendFinancingBase, Mixin): - __tablename__ = 'dividend_financing' - - provider = Column(String(length=32)) - code = Column(String(length=32)) - - # 分红总额 - dividend_money = Column(Float) - - # 新股 - ipo_issues = Column(Float) - ipo_raising_fund = Column(Float) - - # 增发 - spo_issues = Column(Float) - spo_raising_fund = Column(Float) - # 配股 - rights_issues = Column(Float) - rights_raising_fund = Column(Float) -``` - -## 4. 如何查询数据? -查询数据,调用schema的query_data方法即可;由于该方法极为重要,有必要对其支持的参数进行详细的说明。 - -``` - @classmethod - def query_data(cls, - provider_index: int = 0, - ids: List[str] = None, - entity_ids: List[str] = None, - entity_id: str = None, - codes: List[str] = None, - code: str = None, - level: Union[IntervalLevel, str] = None, - provider: str = None, - columns: List = None, - return_type: str = 'df', - start_timestamp: Union[pd.Timestamp, str] = None, - end_timestamp: Union[pd.Timestamp, str] = None, - filters: List = None, - session: Session = None, - order=None, - limit: int = None, - index: Union[str, list] = None, - time_field: str = 'timestamp'): -``` -* provider_index - -数据支持多provider,可以通过schema.providers来查看,provider_index为其providers的索引,默认为0 - -* ids - -以id列表为过滤条件 - -* entity_ids - -以entity_id列表为过滤条件 - -* entity_id - -指定entity_id为过滤条件 - -* codes - -以entity的code列表为过滤条件 - -* code - -指定entity的code为过滤条件 - -* level - -级别,对k线数据有用 - -* provider - -指定provider,可以通过schema.providers来查看,默认不传,使用provider_index即可 - -* columns - -查询返回的字段列表,类型为字符串或者schema.{column}列表,默认None,返回schema支持的所有字段 - -* return_type - -目前支持df和domain,df为pandas dataframe格式,domain为数据库object,需要做数据库更新操作时使用。 - -* start_timestamp - -开始时间过滤条件 - -* end_timestamp - -结束时间过滤条件 - -* filters - -其他的过滤条件列表,支持标准的[sql查询条件](https://docs.sqlalchemy.org/en/13/orm/tutorial.html#common-filter-operators) - -* session - -操作schema的session,默认None,系统自动分配 - -* order - 
-排序的方式,schema.{column}.asc()为升序,schema.{column}.desc()为降序 - -* limit - -返回的数量限制,默认None,不限制 - -* index - -返回df时,索引的字段 - -* time_field - -代表时间的字段,默认为timestamp - -### 4.1 一个查询例子 - -2018年年报 roe>8% 营收增长>8% 的前20个股 - -``` -In [37]: from zvt.domain import * -In [38]: df=FinanceFactor.query_data(filters=[FinanceFactor.roe>0.08,FinanceFactor.report_period=='year',FinanceFactor.op_income_growth_yoy>0.08],start_timestamp='2019-01-01',order=FinanceFactor.roe.desc(),limit=20,columns=[FinanceFactor.code]+FinanceFactor.important_cols(),index='code') - -In [39]: df -Out[39]: - code basic_eps total_op_income net_profit op_income_growth_yoy net_profit_growth_yoy roe rota gross_profit_margin net_margin timestamp -code -000048 000048 1.1193 3.437000e+09 4.374000e+08 1.2179 3.8122 0.5495 0.0989 0.4286 0.1308 2019-04-15 -000629 000629 0.3598 1.516000e+10 3.090000e+09 0.6068 2.5796 0.5281 0.2832 0.2752 0.2086 2019-03-26 -000672 000672 1.8100 5.305000e+09 1.472000e+09 0.1563 0.8596 0.5047 0.2289 0.4670 0.2803 2019-04-11 -000912 000912 0.3500 4.405000e+09 3.516000e+08 0.1796 1.2363 4.7847 0.0539 0.2175 0.0795 2019-03-20 -000932 000932 2.2483 9.137000e+10 6.780000e+09 0.1911 0.6453 0.4866 0.1137 0.1743 0.0944 2019-03-28 -002607 002607 0.2200 6.237000e+09 1.153000e+09 0.5472 1.1967 0.7189 0.2209 0.5908 0.1848 2019-04-09 -002959 002959 2.0611 2.041000e+09 1.855000e+08 0.2396 0.2657 0.5055 0.2075 0.3251 0.0909 2019-07-15 -300107 300107 1.1996 1.418000e+09 6.560000e+08 1.6467 6.5338 0.5202 0.4661 0.6379 0.4625 2019-03-15 -300618 300618 3.6900 2.782000e+09 7.076000e+08 0.8994 0.5746 0.4965 0.2504 0.4530 0.2531 2019-04-26 -300776 300776 3.3900 3.649000e+08 1.679000e+08 1.2059 1.5013 0.7122 0.2651 0.6207 0.4602 2019-02-18 -300792 300792 2.7100 1.013000e+09 1.626000e+08 0.4378 0.1799 0.4723 0.3797 0.4259 0.1606 2019-09-16 -600399 600399 2.0100 5.848000e+09 2.607000e+09 0.1732 2.9493 9.6467 0.2979 0.1453 0.4459 2019-03-29 -600408 600408 0.8100 8.816000e+09 8.202000e+08 0.3957 3.9094 0.7501 0.1681 
0.1535 0.1020 2019-03-22 -600423 600423 0.9000 2.009000e+09 3.903000e+08 0.0975 5.3411 1.6695 0.1264 0.1404 0.1871 2019-03-19 -600507 600507 2.0800 1.729000e+10 2.927000e+09 0.2396 0.1526 0.5817 0.3216 0.3287 0.1696 2019-02-22 -600678 600678 0.0900 4.240000e+08 3.168000e+07 1.2925 0.0948 0.7213 0.0689 0.2183 0.0742 2019-03-14 -600793 600793 1.6568 1.293000e+09 1.745000e+08 0.1164 0.8868 0.7490 0.0486 0.1622 0.1350 2019-04-30 -600870 600870 0.0087 3.096000e+07 4.554000e+06 0.7773 1.3702 0.7458 0.0724 0.2688 0.1675 2019-03-30 -601003 601003 1.7987 4.735000e+10 4.610000e+09 0.1394 0.7420 0.5264 0.1920 0.1439 0.0974 2019-03-29 -603379 603379 2.9400 4.454000e+09 1.108000e+09 0.1423 0.1609 0.5476 0.3547 0.3959 0.2488 2019-03-13 -``` - -其他schema和查询条件使用方法是一样的,请自行探索。 - -## 5. 如何更新数据? - -调用schema的record_data方法即可。 - -``` -In [17]: FinanceFactor.provider_map_recorder -Out[17]: {'eastmoney': zvt.recorders.eastmoney.finance.china_stock_finance_factor_recorder.ChinaStockFinanceFactorRecorder} - -In [18]: FinanceFactor.record_data(codes=['000338']) -FinanceFactor registered recorders:[] -auth success ( 如需说明文档请查看:https://url.cn/5oB7EOO,更多问题请联系JQData管理员,微信号:JQData02 ) -INFO MainThread 2019-12-15 18:03:35,493 ChinaStockFinanceFactorRecorder:recorder.py:551 evaluate_start_end_size_timestamps entity_id:stock_sz_000338,timestamps start:2002-12-31 00:00:00,end:2019-09-30 00:00:00 -INFO MainThread 2019-12-15 18:03:35,509 ChinaStockFinanceFactorRecorder:recorder.py:556 evaluate_start_end_size_timestamps latest record timestamp:2019-10-31 00:00:00 -INFO MainThread 2019-12-15 18:03:35,510 ChinaStockFinanceFactorRecorder:recorder.py:348 run entity_id:stock_sz_000338,evaluate_start_end_size_timestamps result:None,None,0,None -INFO MainThread 2019-12-15 18:03:35,510 ChinaStockFinanceFactorRecorder:recorder.py:357 run finish recording for entity_id:stock_sz_000338,latest_timestamp:None -已退出 -``` -* codes代表需要抓取的股票代码 -* 不传入codes则是全市场抓取 -* 所有的schema对应的数据更新,方法是一致的 - 
-定时任务的方式更新可参考[runners](https://github.com/zvtvz/zvt/blob/master/zvt/recorders/eastmoney/finance0_runner.py) \ No newline at end of file diff --git a/docs/data_recorder.md b/docs/data_recorder.md deleted file mode 100644 index 3323055b..00000000 --- a/docs/data_recorder.md +++ /dev/null @@ -1,131 +0,0 @@ -## 1. 财报数据更新 - -定时任务的运行,方法很多,下面是一个参考脚本: -``` -# -*- coding: utf-8 -*- -import logging -import time - -from apscheduler.schedulers.background import BackgroundScheduler - -from zvt import init_log -from zvt.domain import * - -logger = logging.getLogger(__name__) - -sched = BackgroundScheduler() - - -# 自行更改定时运行时间 -@sched.scheduled_job('cron', hour=2, minute=00) -def run(): - while True: - try: - FinanceFactor.record_data() - BalanceSheet.record_data() - IncomeStatement.record_data() - CashFlowStatement.record_data() - break - except Exception as e: - logger.exception('finance recorder error:{}'.format(e)) - time.sleep(60) - - -if __name__ == '__main__': - init_log('eastmoney_finance_recorder.log') - - run() - - sched.start() - - sched._thread.join() -``` - -然后可将该脚本作为后台任务一直运行,ubuntu的参考命令如下: - -``` -nohup python examples/recorders/finance_recorder.py >/dev/null 2>&1 & -``` - -## 2. 
实时数据更新 -行情数据的更新,有**普通**和**实时**两种模式;普通模式没有新数据就会退出,适合抓取日线以上级别的数据,实时模式会在交易时间根据周期不停抓取,适合盘中记录分钟级别数据。 - -下面展示一下实时行情抓取的用法。 - -## 2.1 比特币tick数据抓取 - -``` -In [3]: CoinTickKdata.record_data(entity_ids=['coin_binance_BTC/USDT'],real_time=True,force_update=False) -CoinTickKdata registered recorders:[] -INFO MainThread 2019-12-25 18:18:31,928 CoinTickRecorder:recorder.py:349 run entity_id:coin_binance_BTC/USDT,evaluate_start_end_size_timestamps result:2019-12-25 18:10:33.018000,None,97,None -INFO MainThread 2019-12-25 18:18:33,908 CoinTickRecorder:recorder.py:313 persist persist for entity_id:coin_binance_BTC/USDT,time interval:[2019-12-25 18:18:04.353000,2019-12-25 18:18:33.311000] -``` - -打开另外一个终端,读取数据,运行下面代码: -``` -# -*- coding: utf-8 -*- -import time - -import pandas as pd - -from zvt.domain import * -from zvt.reader import * - -r = DataReader(data_schema=CoinTickKdata, provider='ccxt', level='tick') - - -class CoinTickListener(DataListener): - - def on_data_loaded(self, data: pd.DataFrame) -> object: - print(data) - - def on_data_changed(self, data: pd.DataFrame) -> object: - pass - - def on_entity_data_changed(self, entity: str, added_data: pd.DataFrame) -> object: - print(added_data) - - -r.register_data_listener(CoinTickListener()) - -while True: - r.move_on() - time.sleep(2) -``` - -数据刷新如下: -``` -entity_id timestamp -coin_binance_BTC/USDT 2019-12-26 17:07:04.714 coin_binance_BTC/USDT_2019-12-26T17:07:04.714 coin_binance_BTC/USDT 2019-12-26 17:07:04.714 ccxt BTC/USDT BTC/USDT tick None 7230.30 0.030994 224.095918 buy None - 2019-12-26 17:07:04.714 coin_binance_BTC/USDT_2019-12-26T17:07:04.714_... coin_binance_BTC/USDT 2019-12-26 17:07:04.714 ccxt BTC/USDT BTC/USDT tick None 7230.31 0.369006 2668.027772 buy None - 2019-12-26 17:07:05.015 coin_binance_BTC/USDT_2019-12-26T17:07:05.015 coin_binance_BTC/USDT 2019-12-26 17:07:05.015 ccxt BTC/USDT BTC/USDT tick None 7230.30 0.017069 123.413991 buy None - 2019-12-26 17:07:05.015 coin_binance_BTC/USDT_2019-12-26T17:07:05.015_... 
coin_binance_BTC/USDT 2019-12-26 17:07:05.015 ccxt BTC/USDT BTC/USDT tick None 7230.31 0.043217 312.472307 buy None - 2019-12-26 17:07:05.305 coin_binance_BTC/USDT_2019-12-26T17:07:05.305 coin_binance_BTC/USDT 2019-12-26 17:07:05.305 ccxt BTC/USDT BTC/USDT tick None 7230.30 0.017069 123.413991 buy None - 2019-12-26 17:07:05.305 coin_binance_BTC/USDT_2019-12-26T17:07:05.305_... coin_binance_BTC/USDT 2019-12-26 17:07:05.305 ccxt BTC/USDT BTC/USDT tick None 7230.31 0.043023 311.069627 buy None - 2019-12-26 17:07:05.599 coin_binance_BTC/USDT_2019-12-26T17:07:05.599 coin_binance_BTC/USDT 2019-12-26 17:07:05.599 ccxt BTC/USDT BTC/USDT tick None 7230.31 0.060222 435.423729 buy None - 2019-12-26 17:07:06.463 coin_binance_BTC/USDT_2019-12-26T17:07:06.463 coin_binance_BTC/USDT 2019-12-26 17:07:06.463 ccxt BTC/USDT BTC/USDT tick None 7229.97 0.049591 358.541442 sell None - 2019-12-26 17:07:06.741 coin_binance_BTC/USDT_2019-12-26T17:07:06.741 coin_binance_BTC/USDT 2019-12-26 17:07:06.741 ccxt BTC/USDT BTC/USDT tick None 7230.06 0.259461 1875.918598 sell None - 2019-12-26 17:07:08.646 coin_binance_BTC/USDT_2019-12-26T17:07:08.646 coin_binance_BTC/USDT 2019-12-26 17:07:08.646 ccxt BTC/USDT BTC/USDT tick None 7230.30 0.040835 295.249301 buy None - 2019-12-26 17:07:10.219 coin_binance_BTC/USDT_2019-12-26T17:07:10.219 coin_binance_BTC/USDT 2019-12-26 17:07:10.219 ccxt BTC/USDT BTC/USDT tick None 7230.30 0.001565 11.315420 buy None - 2019-12-26 17:07:11.075 coin_binance_BTC/USDT_2019-12-26T17:07:11.075 coin_binance_BTC/USDT 2019-12-26 17:07:11.075 ccxt BTC/USDT BTC/USDT tick None 7230.19 0.259707 1877.730954 buy None - 2019-12-26 17:07:11.084 coin_binance_BTC/USDT_2019-12-26T17:07:11.084 coin_binance_BTC/USDT 2019-12-26 17:07:11.084 ccxt BTC/USDT BTC/USDT tick None 7229.99 0.190700 1378.759093 sell None - 2019-12-26 17:07:11.785 coin_binance_BTC/USDT_2019-12-26T17:07:11.785 coin_binance_BTC/USDT 2019-12-26 17:07:11.785 ccxt BTC/USDT BTC/USDT tick None 7230.29 0.400000 2892.116000 
buy None - 2019-12-26 17:07:12.084 coin_binance_BTC/USDT_2019-12-26T17:07:12.084 coin_binance_BTC/USDT 2019-12-26 17:07:12.084 ccxt BTC/USDT BTC/USDT tick None 7230.30 0.060239 435.546042 buy None - id entity_id timestamp provider code name level order price volume turnover direction order_type -entity_id timestamp -coin_binance_BTC/USDT 2019-12-26 17:07:04.714 coin_binance_BTC/USDT_2019-12-26T17:07:04.714 coin_binance_BTC/USDT 2019-12-26 17:07:04.714 ccxt BTC/USDT BTC/USDT tick None 7230.30 0.030994 224.095918 buy None - 2019-12-26 17:07:04.714 coin_binance_BTC/USDT_2019-12-26T17:07:04.714_... coin_binance_BTC/USDT 2019-12-26 17:07:04.714 ccxt BTC/USDT BTC/USDT tick None 7230.31 0.369006 2668.027772 buy None - 2019-12-26 17:07:05.015 coin_binance_BTC/USDT_2019-12-26T17:07:05.015 coin_binance_BTC/USDT 2019-12-26 17:07:05.015 ccxt BTC/USDT BTC/USDT tick None 7230.30 0.017069 123.413991 buy None - 2019-12-26 17:07:05.015 coin_binance_BTC/USDT_2019-12-26T17:07:05.015_... coin_binance_BTC/USDT 2019-12-26 17:07:05.015 ccxt BTC/USDT BTC/USDT tick None 7230.31 0.043217 312.472307 buy None - 2019-12-26 17:07:05.305 coin_binance_BTC/USDT_2019-12-26T17:07:05.305 coin_binance_BTC/USDT 2019-12-26 17:07:05.305 ccxt BTC/USDT BTC/USDT tick None 7230.30 0.017069 123.413991 buy None - 2019-12-26 17:07:05.305 coin_binance_BTC/USDT_2019-12-26T17:07:05.305_... 
coin_binance_BTC/USDT 2019-12-26 17:07:05.305 ccxt BTC/USDT BTC/USDT tick None 7230.31 0.043023 311.069627 buy None - 2019-12-26 17:07:05.599 coin_binance_BTC/USDT_2019-12-26T17:07:05.599 coin_binance_BTC/USDT 2019-12-26 17:07:05.599 ccxt BTC/USDT BTC/USDT tick None 7230.31 0.060222 435.423729 buy None - 2019-12-26 17:07:06.463 coin_binance_BTC/USDT_2019-12-26T17:07:06.463 coin_binance_BTC/USDT 2019-12-26 17:07:06.463 ccxt BTC/USDT BTC/USDT tick None 7229.97 0.049591 358.541442 sell None - 2019-12-26 17:07:06.741 coin_binance_BTC/USDT_2019-12-26T17:07:06.741 coin_binance_BTC/USDT 2019-12-26 17:07:06.741 ccxt BTC/USDT BTC/USDT tick None 7230.06 0.259461 1875.918598 sell None - 2019-12-26 17:07:08.646 coin_binance_BTC/USDT_2019-12-26T17:07:08.646 coin_binance_BTC/USDT 2019-12-26 17:07:08.646 ccxt BTC/USDT BTC/USDT tick None 7230.30 0.040835 295.249301 buy None - 2019-12-26 17:07:10.219 coin_binance_BTC/USDT_2019-12-26T17:07:10.219 coin_binance_BTC/USDT 2019-12-26 17:07:10.219 ccxt BTC/USDT BTC/USDT tick None 7230.30 0.001565 11.315420 buy None - 2019-12-26 17:07:11.075 coin_binance_BTC/USDT_2019-12-26T17:07:11.075 coin_binance_BTC/USDT 2019-12-26 17:07:11.075 ccxt BTC/USDT BTC/USDT tick None 7230.19 0.259707 1877.730954 buy None - 2019-12-26 17:07:11.084 coin_binance_BTC/USDT_2019-12-26T17:07:11.084 coin_binance_BTC/USDT 2019-12-26 17:07:11.084 ccxt BTC/USDT BTC/USDT tick None 7229.99 0.190700 1378.759093 sell None - 2019-12-26 17:07:11.785 coin_binance_BTC/USDT_2019-12-26T17:07:11.785 coin_binance_BTC/USDT 2019-12-26 17:07:11.785 ccxt BTC/USDT BTC/USDT tick None 7230.29 0.400000 2892.116000 buy None - 2019-12-26 17:07:12.084 coin_binance_BTC/USDT_2019-12-26T17:07:12.084 coin_binance_BTC/USDT 2019-12-26 17:07:12.084 ccxt BTC/USDT BTC/USDT tick None 7230.30 0.060239 435.546042 buy None -``` diff --git a/docs/data_usage.md b/docs/data_usage.md deleted file mode 100644 index ef617125..00000000 --- a/docs/data_usage.md +++ /dev/null @@ -1 +0,0 @@ -## TODO:数据使用例子展示 \ No 
newline at end of file diff --git a/docs/design-philosophy.md b/docs/design-philosophy.md deleted file mode 100644 index 53f185a1..00000000 --- a/docs/design-philosophy.md +++ /dev/null @@ -1,24 +0,0 @@ -## 统一性(Unity) -统一性使你能够愉快的思考 - -比如投资标的的唯一标识,zvt里面定义如下 -``` -{entity_type}_{exchange}_{code} -``` -很自然的,你就知道stock_sz_000338,coin_binance_EOS/USDT代表什么. - -比如Recorder,其对所有标的的记录提供了统一的抽象. -比如get_kdata,其对所有的标的的使用方式都是一致的. -比如TechnicalFactor,其对所有标的,所有级别的操作都是一致的. -比如回测和实时交易,Trader提供了一致的处理方式. - -## 分层(Layer) - -分层的核心在于不同层次之间的协议,整个系统的稳定性在于协议的稳定性. -

- -## 扩展性(Scalable) - -- 很容易的在系统中添加数据,并自动获得其他模块的能力. -- 很容易实现自己的factor -- 很容易的扩展自己的trader \ No newline at end of file diff --git a/docs/donate.md b/docs/donate.md deleted file mode 100644 index d92cc127..00000000 --- a/docs/donate.md +++ /dev/null @@ -1,30 +0,0 @@ -## 💡贡献 - -期待能有更多的开发者参与到 zvt 的开发中来,我会保证尽快 Reivew PR 并且及时回复。但提交 PR 请确保 - -1. 通过所有单元测试,如若是新功能,请为其新增单元测试 -2. 遵守开发规范 -3. 如若需要,请更新相对应的文档 - -也非常欢迎开发者能为 zvt 提供更多的示例,共同来完善文档。 - -## 💌请作者喝杯咖啡 - -如果你觉得项目对你有帮助,可以请作者喝杯咖啡 -Alipay      -Wechat - -## 🤝联系方式 - -QQ群:300911873 - -个人微信:foolcage 添加暗号:zvt -Wechat - ------- -公众号: -Wechat - ------- -知乎专栏: -https://zhuanlan.zhihu.com/automoney diff --git a/docs/en/README.md b/docs/en/README.md deleted file mode 100644 index 5f58984a..00000000 --- a/docs/en/README.md +++ /dev/null @@ -1,28 +0,0 @@ -[![github](https://img.shields.io/github/stars/zvtvz/zvt.svg)](https://github.com/zvtvz/zvt) -[![image](https://img.shields.io/pypi/l/zvt.svg)](https://pypi.org/project/zvt/) -[![Build Status](https://api.travis-ci.org/zvtvz/zvt.svg?branch=master)](https://travis-ci.org/zvtvz/zvt) -[![codecov.io](https://codecov.io/github/zvtvz/zvt/coverage.svg?branch=master)](https://codecov.io/github/zvtvz/zvt) -## what's zvt? - -ZVT is a quant trading platform written after rethinking about [fooltrader] (https://github.com/foolcage/fooltrader), which includes scalable data recorder, api, factor calculation, stock picking, backtesting, trading and focus on **low frequency**, **multi-level**, **multi-factors** **multi-targets** full market analysis and trading framework. - -## what else can ZVT be? - - - From the text, zero vector trader, meaning the market is the result of a variety of vector synergy, you could only be a zero vector to see the market clearly. - - From the perspective of shape, Z V T itself coincides with the typical form of the market, implying the importance of market geometry. - - The meaning of the zvt icon, you can interpret it yourself - -

- -## features - -- china market data: stock meta, financial statements, major shareholder behavior, executive transactions, dividend financing details, stock market capital flow, margin financing, dragon and tiger charts, etc. -- crypto currency data -- Standardization of data, multi-data source (provider) cross-validation, completion -- Data recorder is very easy to expand -- Uniform and simple API, support sql query, support pandas -- Scalable factor, abstracting a unified calculation for single- and multi-targets operations -- Provides a unified way of visualizing the factor -- Support multi-targets, multi-factor, multi-level backtesting -- Real-time visualization of factors, trading signals and strategies -- Support a variety of real trading (implementation) \ No newline at end of file diff --git a/docs/en/_sidebar.md b/docs/en/_sidebar.md deleted file mode 100644 index 20620035..00000000 --- a/docs/en/_sidebar.md +++ /dev/null @@ -1,19 +0,0 @@ -- preface - - [introduction](en/README.md "zvt guide") - - [design philosophy](en/design-philosophy.md "zvt design philosophy") - -- usage - - - [quick start](en/quickstart.md "zvt quick start") - - [data usage](en/data_usage.md "zvt data usage") - - [factor usage](en/factor_usage.md "zvt factor usage") - - [selector usage](en/selector_usage.md "zvt selector usage") - - [trader usage](en/trader_usage.md "zvt trader usage") - -- extending - - [extend data](en/data_extending.md "extend zvt data") - - [extend factor](en/factor_extending.md "extend zvt factor") - - [extend selector](en/selector_extending.md "extend zvt selector") - - [extend factor](en/trader_extending.md "extend zvt trader") - -- [Changelog](changelog.md) \ No newline at end of file diff --git a/docs/en/data_extending.md b/docs/en/data_extending.md deleted file mode 100644 index 7cae6cfa..00000000 --- a/docs/en/data_extending.md +++ /dev/null @@ -1,356 +0,0 @@ -## 1. 
data structure -concepts - -### 1.1 provider - -which means data source provider,e.g joinquant,eastmoney,sina,netease,ccxt - -### 1.2 store category - -logical classification of data,represents one db file physically which storing data schema with relations - -### 1.3 data schema - -the data schema for the db table - -#### *logic view* #### -

- -#### *physical view* #### -

- -> generally speaking,the data schema is stable.Some data would be generated from several provider the main provider would be treated as the provider. -When the data has multiple providers, you could verify each other by specifying the different providers on the api. - - -## 2. Principle of recorder - -To make the data provided by each provider (or crawl it yourself) into data that conforms to the data schema, you need to do the following: - -* Initialize the target to be crawled -Can grab a single target to debug, and then grab the full amount of the targets - -* Ability to grab from the last crawled place -Reduce unnecessary requests, incremental crawling - -* Encapsulate commonly used request methods -For the request of time series data, we abstract them to: start, end, size, time list - -* Ability to automatically handle duplicate - -* Ability to set the crawl rate - -* Provide callback functions for the completion of the fetching -Convenient data verification and multi-provider data completion - -The flow chart is as follows: - -

- -check the details at[*recorder*](https://github.com/zvtvz/zvt/blob/master/zvt/recorders/recorder.py). Part of the project's recorder implementation is provided directly in current project, with some closed source, only the final database file (which will be published in the dropbox and qq group). - -The entire eastmoney recorder is based on the basic recorder class. Basically, a type of data is processed in about 10 lines of code; mastering the method, I believe that everyone can easily write other recorders. - - -## 3. how to add provder - -Here is an example of joinquant - - -### 2.1 add joinquant provider ### - -[*code*](https://github.com/zvtvz/zvt/blob/master/zvt/domain/common.py#L54) - -``` -class Provider(enum.Enum): - # add - JOINQUANT = 'joinquant' -``` - -### 2.2 add store category ### -[*code*](https://github.com/zvtvz/zvt/blob/master/zvt/domain/common.py#L59) - -``` -class StoreCategory(enum.Enum): - meta = 'meta' - stock_1m_kdata = 'stock_1m_kdata' - stock_5m_kdata = 'stock_5m_kdata' - stock_15m_kdata = 'stock_15m_kdata' - stock_1h_kdata = 'stock_1h_kdata' - stock_1d_kdata = 'stock_1d_kdata' - stock_1wk_kdata = 'stock_1wk_kdata' - - etf_1d_kdata = 'etf_1d_kdata' - index_1d_kdata = 'index_1d_kdata' - - finance = 'finance' - dividend_financing = 'dividend_financing' - holder = 'holder' - trading = 'trading' - money_flow = 'money_flow' - macro = 'macro' - business = 'business' - - coin_meta = 'coin_meta' - coin_tick_kdata = 'coin_tick_kdata' - coin_1m_kdata = 'coin_1m_kdata' - coin_5m_kdata = 'coin_5m_kdata' - coin_15m_kdata = 'coin_15m_kdata' - coin_1h_kdata = 'coin_1h_kdata' - coin_1d_kdata = 'coin_1d_kdata' - coin_1wk_kdata = 'coin_1wk_kdata' -``` - -### 2.3 set store category the provider providing### -``` - -provider_map_category = { - 'eastmoney': [StoreCategory.meta, - StoreCategory.finance, - StoreCategory.dividend_financing, - StoreCategory.holder, - StoreCategory.trading], - - Provider.SINA: [StoreCategory.meta, - StoreCategory.etf_1d_kdata, 
- StoreCategory.stock_1d_kdata, - StoreCategory.money_flow], - - Provider.NETEASE: [StoreCategory.stock_1d_kdata, - StoreCategory.index_1d_kdata], - - Provider.EXCHANGE: [StoreCategory.meta, StoreCategory.macro], - - Provider.ZVT: [StoreCategory.business], - - # TODO:would add other data from joinquant - Provider.JOINQUANT: [StoreCategory.stock_1m_kdata, - StoreCategory.stock_5m_kdata, - StoreCategory.stock_15m_kdata, - StoreCategory.stock_1h_kdata, - StoreCategory.stock_1d_kdata, - StoreCategory.stock_1wk_kdata, ], - - Provider.CCXT: [StoreCategory.coin_meta, - StoreCategory.coin_tick_kdata, - StoreCategory.coin_1m_kdata, - StoreCategory.coin_5m_kdata, - StoreCategory.coin_15m_kdata, - StoreCategory.coin_1h_kdata, - StoreCategory.coin_1d_kdata, - StoreCategory.coin_1wk_kdata], -} - -``` -### 2.4 define data schema ### -[*代码*](https://github.com/zvtvz/zvt/blob/master/zvt/domain/quote.py#L9) - -stock market data structure - -``` -class StockKdataCommon(object): - id = Column(String(length=128), primary_key=True) - provider = Column(String(length=32)) - timestamp = Column(DateTime) - security_id = Column(String(length=128)) - code = Column(String(length=32)) - name = Column(String(length=32)) - # level = Column(Enum(IntervalLevel, values_callable=enum_value)) - level = Column(String(length=32)) - - open = Column(Float) - hfq_open = Column(Float) - qfq_open = Column(Float) - close = Column(Float) - hfq_close = Column(Float) - qfq_close = Column(Float) - high = Column(Float) - hfq_high = Column(Float) - qfq_high = Column(Float) - low = Column(Float) - hfq_low = Column(Float) - qfq_low = Column(Float) - volume = Column(Float) - turnover = Column(Float) - change_pct = Column(Float) - turnover_rate = Column(Float) - factor = Column(Float) - -class Stock1DKdata(Stock1DKdataBase, StockKdataCommon): - __tablename__ = 'stock_1d_kdata' -``` - - -### 2.5 implement the recorder - 
-[*代码*](https://github.com/zvtvz/zvt/blob/master/zvt/recorders/joinquant/quotes/jq_china_stock__kdata_recorder.py) - -core code - -``` -#Convert joinquant data to standard zvt data - -class MyApiWrapper(ApiWrapper): - def request(self, url=None, method='get', param=None, path_fields=None): - security_item = param['security_item'] - start_timestamp = param['start_timestamp'] - end_timestamp = param['end_timestamp'] - # 不复权 - df = get_price(to_jq_security_id(security_item), start_date=to_time_str(start_timestamp), - end_date=end_timestamp, - frequency=param['jq_level'], - fields=['open', 'close', 'low', 'high', 'volume', 'money'], - skip_paused=True, fq=None) - df.index.name = 'timestamp' - df.reset_index(inplace=True) - df['name'] = security_item.name - df.rename(columns={'money': 'turnover'}, inplace=True) - - df['timestamp'] = pd.to_datetime(df['timestamp']) - df['provider'] = Provider.JOINQUANT.value - df['level'] = param['level'] - - # remove the unfinished kdata - if is_in_trading(entity_type='stock', exchange='sh', timestamp=df.iloc[-1, :]['timestamp']): - df = df.iloc[:-1, :] - - return df.to_dict(orient='records') - -#Completion of re-weighting data -def on_finish(self, security_item): - kdatas = get_kdata(security_id=security_item.id, level=self.level.value, order=StockDayKdata.timestamp.asc(), - return_type='domain', - session=self.session, - filters=[StockDayKdata.hfq_close.is_(None), - StockDayKdata.timestamp >= to_pd_timestamp('2005-01-01')]) - if kdatas: - start = kdatas[0].timestamp - end = kdatas[-1].timestamp - - # get hfq from joinquant - df = get_price(to_jq_security_id(security_item), start_date=to_time_str(start), end_date=now_time_str(), - frequency='daily', - fields=['factor', 'open', 'close', 'low', 'high'], - skip_paused=True, fq='post') - if df is not None and not df.empty: - # fill hfq data - for kdata in kdatas: - if kdata.timestamp in df.index: - kdata.hfq_open = df.loc[kdata.timestamp, 'open'] - kdata.hfq_close = df.loc[kdata.timestamp, 
'close'] - kdata.hfq_high = df.loc[kdata.timestamp, 'high'] - kdata.hfq_low = df.loc[kdata.timestamp, 'low'] - kdata.factor = df.loc[kdata.timestamp, 'factor'] - self.session.commit() - - latest_factor = df.factor[-1] - # factor not change yet, no need to reset the qfq past - if latest_factor == self.current_factors.get(security_item.id): - sql = 'UPDATE stock_day_kdata SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \ - 'security_id=\'{}\' and level=\'{}\' and (qfq_close isnull or qfq_high isnull or qfq_low isnull or qfq_open isnull)'.format( - latest_factor, latest_factor, latest_factor, latest_factor, security_item.id, self.level.value) - else: - sql = 'UPDATE stock_day_kdata SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \ - 'security_id=\'{}\' and level=\'{}\''.format(latest_factor, - latest_factor, - latest_factor, - latest_factor, - security_item.id, - self.level.value) - self.logger.info(sql) - self.session.execute(sql) - self.session.commit() - - # TODO:use netease provider to get turnover_rate - self.logger.info('use netease provider to get turnover_rate') -``` - -There is an exercise left here. Since the data of the joinquant does not provide the turnover rate and changing percentage, it can be completed by other data sources or by calculation. - -Netease's data has no re-rights factor information, and it is complemented by a joinquant re-rights factor. Similarly, Netease's turnover rate and changing percentage can be used to complement the joinquant data. 
- -[*参考代码*](https://github.com/zvtvz/zvt/blob/master/zvt/recorders/netease/china_stock_day_kdata_recorder.py) - -### 2.6 run the recorder - -set your joinquant JQ_ACCOUNT/JQ_PASSWD [settings](https://github.com/zvtvz/zvt/blob/master/zvt/settings.py) - ->Jqdata is currently free for one year, the registered address is as follows: ->https://www.joinquant.com/default/index/sdk?channelId=953cbf5d1b8683f81f0c40c9d4265c0d ->if you need to increase the amount of free usage,please contact me - -``` -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--level', help='trading level', default='1d', choices=[item.value for item in IntervalLevel]) - parser.add_argument('--codes', help='codes', default=SAMPLE_STOCK_CODES, nargs='+') - - args = parser.parse_args() - - level = IntervalLevel(args.level) - codes = args.codes - - init_process_log('jq_china_stock_{}_kdata.log'.format(args.level)) - JQChinaStockKdataRecorder(level=level, sleeping_time=0, codes=codes).run() -``` - -the level means different time intervals of the kdata,codes means the stock codes and if you don't set it,the recorder would get full china market data. - -# 3. 
automatically acquired ability - -after you add the provider,you could use api,factor,selector,trader,visualization of the zvt basing the data - -``` -In [1]: from zvt.api.technical import * -In [2]: from zvt.api.domain import * -In [3]: df1=get_kdata(security_id='stock_sz_300027', provider='joinquant',start_timestamp='2019-01-01',limit=10) -In [4]: df1 - id provider timestamp security_id code name level open hfq_open qfq_open close hfq_close qfq_close high hfq_high qfq_high low hfq_low qfq_low volume turnover change_pct turnover_rate factor -0 stock_sz_300027_2019-01-02 joinquant 2019-01-02 stock_sz_300027 300027 华谊兄弟 1d 4.54 68.58 4.539918 4.40 66.47 4.400238 4.58 69.19 4.580299 4.35 65.71 4.349927 29554330.0 1.306117e+08 None None 15.106 -1 stock_sz_300027_2019-01-03 joinquant 2019-01-03 stock_sz_300027 300027 华谊兄弟 1d 4.40 66.47 4.400238 4.42 66.77 4.420098 4.45 67.22 4.449887 4.36 65.86 4.359857 15981569.0 7.052363e+07 None None 15.106 -2 stock_sz_300027_2019-01-04 joinquant 2019-01-04 stock_sz_300027 300027 华谊兄弟 1d 4.36 65.86 4.359857 4.52 68.28 4.520058 4.54 68.58 4.539918 4.33 65.41 4.330068 17103081.0 7.657399e+07 None None 15.106 -3 stock_sz_300027_2019-01-07 joinquant 2019-01-07 stock_sz_300027 300027 华谊兄弟 1d 4.54 68.58 4.539918 4.59 69.34 4.590229 4.63 69.94 4.629948 4.48 67.67 4.479677 16163938.0 7.383168e+07 None None 15.106 -4 stock_sz_300027_2019-01-08 joinquant 2019-01-08 stock_sz_300027 300027 华谊兄弟 1d 4.59 69.34 4.590229 4.60 69.49 4.600159 4.66 70.39 4.659738 4.56 68.88 4.559778 10908603.0 5.034655e+07 None None 15.106 -5 stock_sz_300027_2019-01-09 joinquant 2019-01-09 stock_sz_300027 300027 华谊兄弟 1d 4.63 69.94 4.629948 4.58 69.19 4.580299 4.73 71.45 4.729909 4.58 69.19 4.580299 16901976.0 7.881876e+07 None None 15.106 -6 stock_sz_300027_2019-01-10 joinquant 2019-01-10 stock_sz_300027 300027 华谊兄弟 1d 4.63 69.94 4.629948 4.61 69.64 4.610089 4.76 71.90 4.759698 4.59 69.34 4.590229 20855469.0 9.717176e+07 None None 15.106 -7 stock_sz_300027_2019-01-11 
joinquant 2019-01-11 stock_sz_300027 300027 华谊兄弟 1d 4.60 69.49 4.600159 4.67 70.55 4.670330 4.67 70.55 4.670330 4.56 68.88 4.559778 13216260.0 6.089670e+07 None None 15.106 -8 stock_sz_300027_2019-01-14 joinquant 2019-01-14 stock_sz_300027 300027 华谊兄弟 1d 4.63 69.94 4.629948 4.57 69.03 4.569707 4.65 70.24 4.649808 4.55 68.73 4.549848 12421993.0 5.705187e+07 None None 15.106 -9 stock_sz_300027_2019-01-15 joinquant 2019-01-15 stock_sz_300027 300027 华谊兄弟 1d 4.56 68.88 4.559778 4.64 70.09 4.639878 4.66 70.39 4.659738 4.54 68.58 4.539918 14403671.0 6.637258e+07 None None 15.106 - - -#compare with netease data -In [24]: df2=get_kdata(security_id='stock_sz_300027', provider='netease',start_timestamp='2019-01-01',limit=10) - -In [25]: df2 -Out[25]: - id provider timestamp security_id code name level open hfq_open qfq_open close hfq_close qfq_close high hfq_high qfq_high low hfq_low qfq_low volume turnover change_pct turnover_rate factor -0 stock_sz_300027_2019-01-02 netease 2019-01-02 stock_sz_300027 300027 华谊兄弟 1d 4.54 68.58 4.539918 4.40 66.47 4.400238 4.58 69.19 4.580299 4.35 65.71 4.349927 29554330.0 1.306117e+08 -6.1834 1.0652 15.106 -1 stock_sz_300027_2019-01-03 netease 2019-01-03 stock_sz_300027 300027 华谊兄弟 1d 4.40 66.47 4.400238 4.42 66.77 4.420098 4.45 67.22 4.449887 4.36 65.86 4.359857 15981569.0 7.052363e+07 0.4545 0.5760 15.106 -2 stock_sz_300027_2019-01-04 netease 2019-01-04 stock_sz_300027 300027 华谊兄弟 1d 4.36 65.86 4.359857 4.52 68.28 4.520058 4.54 68.58 4.539918 4.33 65.41 4.330068 17103081.0 7.657399e+07 2.2624 0.6164 15.106 -3 stock_sz_300027_2019-01-07 netease 2019-01-07 stock_sz_300027 300027 华谊兄弟 1d 4.54 68.58 4.539918 4.59 69.34 4.590229 4.63 69.94 4.629948 4.48 67.67 4.479677 16163938.0 7.383168e+07 1.5487 0.5826 15.106 -4 stock_sz_300027_2019-01-08 netease 2019-01-08 stock_sz_300027 300027 华谊兄弟 1d 4.59 69.34 4.590229 4.60 69.49 4.600159 4.66 70.39 4.659738 4.56 68.88 4.559778 10908603.0 5.034655e+07 0.2179 0.3932 15.106 -5 stock_sz_300027_2019-01-09 
netease 2019-01-09 stock_sz_300027 300027 华谊兄弟 1d 4.63 69.94 4.629948 4.58 69.19 4.580299 4.73 71.45 4.729909 4.58 69.19 4.580299 16901976.0 7.881876e+07 -0.4348 0.6092 15.106 -6 stock_sz_300027_2019-01-10 netease 2019-01-10 stock_sz_300027 300027 华谊兄弟 1d 4.63 69.94 4.629948 4.61 69.64 4.610089 4.76 71.90 4.759698 4.59 69.34 4.590229 20855469.0 9.717176e+07 0.6550 0.7517 15.106 -7 stock_sz_300027_2019-01-11 netease 2019-01-11 stock_sz_300027 300027 华谊兄弟 1d 4.60 69.49 4.600159 4.67 70.55 4.670330 4.67 70.55 4.670330 4.56 68.88 4.559778 13216260.0 6.089670e+07 1.3015 0.4763 15.106 -8 stock_sz_300027_2019-01-14 netease 2019-01-14 stock_sz_300027 300027 华谊兄弟 1d 4.63 69.94 4.629948 4.57 69.03 4.569707 4.65 70.24 4.649808 4.55 68.73 4.549848 12421993.0 5.705187e+07 -2.1413 0.4477 15.106 -9 stock_sz_300027_2019-01-15 netease 2019-01-15 stock_sz_300027 300027 华谊兄弟 1d 4.56 68.88 4.559778 4.64 70.09 4.639878 4.66 70.39 4.659738 4.54 68.58 4.539918 14403671.0 6.637258e+07 1.5317 0.5191 15.106 - -``` -the result -``` -In [26]: df1.loc[:,['open','close','high','low','volume']]-df2.loc[:,['open','close','high','low','volume']] -Out[26]: - open close high low volume -0 0.0 0.0 0.0 0.0 0.0 -1 0.0 0.0 0.0 0.0 0.0 -2 0.0 0.0 0.0 0.0 0.0 -3 0.0 0.0 0.0 0.0 0.0 -4 0.0 0.0 0.0 0.0 0.0 -5 0.0 0.0 0.0 0.0 0.0 -6 0.0 0.0 0.0 0.0 0.0 -7 0.0 0.0 0.0 0.0 0.0 -8 0.0 0.0 0.0 0.0 0.0 -9 0.0 0.0 0.0 0.0 0.0 -``` - -Well, the data of the two companies are consistent, and the accuracy of the data is further confirmed. 
\ No newline at end of file diff --git a/docs/en/data_usage.md b/docs/en/data_usage.md deleted file mode 100644 index d739f3f5..00000000 --- a/docs/en/data_usage.md +++ /dev/null @@ -1,140 +0,0 @@ -## 目前支持的数据 - -### 股票数据 -|名称|dataschema|provider|comments| download| -|-----------|--------|--------|-----|-----| -|个股资料|Stock|eastmoney,sina|个股和板块为多对多的关系| -|板块资料|Index|eastmoney,sina|板块有行业,概念,区域三个分类的维度,不同的provider分类会有所不同,个股和板块为多对多的关系| -|个股行情|Stock{level}Kdata|joinquant,netease,eastmoney|支持1,5,15,30,60分钟, 日线,周线级别| -|指数日线行情|Index1DKdata|eastmoney,sina,joinquant|指数本质上也是一种板块,指数对应板块资料中的标的| -|个股资金流|MoneyFlow|eastmoney,sina,joinquant|| -|板块资金流|MoneyFlow|eastmoney,sina,joinquant|对应板块资料里面的标的| -|分红融资数据|DividendFinancing|eastmoney|企业最后的底线就是能不能给投资者赚钱,此为年度统计信息| -|分红明细|DividendDetail|eastmoney|| -|融资明细|SPODetail|eastmoney|| -|配股明细|RightsIssueDetail|eastmoney|| -|主要财务指标|FinanceFactor|eastmoney|| -|资产负债表|BalanceSheet|eastmoney|| -|利润表|IncomeStatement|eastmoney|| -|现金流量表|CashFlowStatement|eastmoney|| -|十大股东|TopTenHolder|eastmoney|| -|十大流通股东|TopTenTradableHolder|eastmoney|| -|机构持股|InstitutionalInvestorHolder|eastmoney|| -|高管交易|ManagerTrading|eastmoney|| -|大股东交易|HolderTrading|eastmoney|| -|大宗交易|BigDealTrading|eastmoney|| -|融资融券|MarginTrading|eastmoney|| -|龙虎榜数据|DragonAndTiger|eastmoney|| - -### 数字货币数据 - -|名称|dataschema|provider|comments| download| -|-----------|--------|--------|-----|-----| -|货币资料|Coin|ccxt|| -|行情|Coin{level}Kdata|ccxt|支持tick,1,5,15,30,60分钟, 日线,周线级别| - -### 期货数据 -> 待支持 - - -## 使用示例 - -### 个股K线 -``` -In [5]: from zvt.api import technical -In [6]: df= technical.get_kdata(security_id='stock_sz_000338',provider='joinquant') -In [7]: df.tail() -Out[7]: - id provider timestamp security_id code name level open hfq_open qfq_open close hfq_close qfq_close high hfq_high qfq_high low hfq_low qfq_low volume turnover change_pct turnover_rate factor -timestamp -2019-06-25 stock_sz_000338_2019-06-25 joinquant 2019-06-25 stock_sz_000338 000338 潍柴动力 1d 12.55 235.41 12.549845 12.28 230.35 
12.280094 12.56 235.60 12.559974 12.08 226.60 12.080179 75627481.0 9.256614e+08 None None 18.758 -2019-06-26 stock_sz_000338_2019-06-26 joinquant 2019-06-26 stock_sz_000338 000338 潍柴动力 1d 12.20 228.85 12.200128 12.25 229.79 12.250240 12.38 232.22 12.379785 12.12 227.35 12.120162 39932435.0 4.891142e+08 None None 18.758 -2019-06-27 stock_sz_000338_2019-06-27 joinquant 2019-06-27 stock_sz_000338 000338 潍柴动力 1d 12.25 229.79 12.250240 12.25 229.79 12.250240 12.25 229.79 12.250240 12.25 229.79 12.250240 0.0 0.000000e+00 None None 18.758 -2019-06-28 stock_sz_000338_2019-06-28 joinquant 2019-06-28 stock_sz_000338 000338 潍柴动力 1d 12.23 229.41 12.229982 12.29 230.54 12.290223 12.44 233.35 12.440026 12.23 229.41 12.229982 43280844.0 5.325563e+08 None None 18.758 -2019-07-01 stock_sz_000338_2019-07-01 joinquant 2019-07-01 stock_sz_000338 000338 潍柴动力 1d 12.50 234.48 12.500267 12.89 241.79 12.889967 12.95 242.92 12.950208 12.41 232.79 12.410172 101787878.0 1.291295e+09 None None 18.758 - -``` - -### 数字货币k线 -``` -In [10]: df= technical.get_kdata(security_id='coin_binance_EOS/USDT',provider='ccxt') -In [10]: df.tail() -Out[10]: - id provider timestamp security_id code name level open close high low volume turnover -timestamp -2019-06-26 coin_binance_EOS/USDT_2019-06-26 ccxt 2019-06-26 coin_binance_EOS/USDT EOS/USDT EOS/USDT 1d 7.1736 6.8096 7.4475 6.1000 16934720.29 None -2019-06-27 coin_binance_EOS/USDT_2019-06-27 ccxt 2019-06-27 coin_binance_EOS/USDT EOS/USDT EOS/USDT 1d 6.8082 5.9663 6.8557 5.6329 20215677.51 None -2019-06-28 coin_binance_EOS/USDT_2019-06-28 ccxt 2019-06-28 coin_binance_EOS/USDT EOS/USDT EOS/USDT 1d 5.9742 6.2182 6.2918 5.7625 12172080.98 None -2019-06-29 coin_binance_EOS/USDT_2019-06-29 ccxt 2019-06-29 coin_binance_EOS/USDT EOS/USDT EOS/USDT 1d 6.2206 6.3302 6.3915 5.9566 7403462.75 None -2019-06-30 coin_binance_EOS/USDT_2019-06-30 ccxt 2019-06-30 coin_binance_EOS/USDT EOS/USDT EOS/USDT 1d 6.3282 5.7926 6.3966 5.6894 8043978.96 None - -``` - -### 社保持仓 -``` -In 
[11]: from zvt.domain import * - from zvt.api import fundamental -In [12]: df = get_top_ten_tradable_holder(start_timestamp='2018-09-30',filters=[TopTenTradableHolder.holder_name.like('%社保%')],order=TopTenTradableHolder.shareholding_ratio.desc()) - -In [18]: df.tail() -Out[18]: - id provider timestamp security_id code report_period report_date holder_code holder_name shareholding_numbers shareholding_ratio change change_ratio -timestamp -2019-03-31 stock_sz_000778_2019-03-31 00:00:00_全国社保基金五零三组合 None 2019-03-31 stock_sz_000778 000778 ReportPeriod.season1 2019-03-31 70010503 全国社保基金五零三组合 60000000.0 0.0153 1000000.0 0.0169 -2019-03-31 stock_sz_002572_2019-03-31 00:00:00_全国社保基金一零九组合 None 2019-03-31 stock_sz_002572 002572 ReportPeriod.season1 2019-03-31 70010109 全国社保基金一零九组合 7520000.0 0.0118 -8013000.0 -0.5159 -2019-03-31 stock_sz_000338_2019-03-31 00:00:00_全国社保基金一零二组合 None 2019-03-31 stock_sz_000338 000338 ReportPeriod.season1 2019-03-31 70010102 全国社保基金一零二组合 44000000.0 0.0071 -6000000.0 -0.1200 -2019-03-31 stock_sz_000338_2019-03-31 00:00:00_全国社保基金一零一组合 None 2019-03-31 stock_sz_000338 000338 ReportPeriod.season1 2019-03-31 70010101 全国社保基金一零一组合 36850000.0 0.0060 NaN NaN -2019-03-31 stock_sz_000778_2019-03-31 00:00:00_全国社保基金四一三组合 None 2019-03-31 stock_sz_000778 000778 ReportPeriod.season1 2019-03-31 70010413 全国社保基金四一三组合 17800000.0 0.0045 NaN NaN - -``` - -### 马云持仓 ### -```bash -In [26]: df = get_top_ten_tradable_holder(filters=[TopTenTradableHolder.holder_name=='马云']) -Out[27]: - holder_name code shareholding_numbers shareholding_ratio change change_ratio -0 马云 002204 460800.0 0.0085 NaN NaN -1 马云 300027 3912000.0 0.0205 NaN NaN -2 马云 300027 8319000.0 0.0230 NaN NaN -3 马云 300027 8319000.0 0.0230 NaN NaN - -22 马云 300027 99780000.0 0.0520 NaN NaN -23 马云 300027 99780000.0 0.0520 NaN NaN -24 马云 300027 99780000.0 0.0451 NaN NaN -``` -### 2018年报eps前50 -```bash -In [30]: df = 
get_finance_factor(start_timestamp='2018-12-31',order=FinanceFactor.basic_eps.desc(),limit=50,columns=[FinanceFactor.code,FinanceFactor.timestamp,FinanceFactor.basic_eps]) -Out[31]: - code timestamp basic_eps -0 600519 2018-12-31 28.0200 -1 603444 2018-12-31 10.1200 -2 601318 2018-12-31 6.0200 -3 000661 2018-12-31 5.9200 - -47 603393 2018-12-31 2.0900 -48 601869 2018-12-31 2.0900 -49 600507 2018-12-31 2.0800 - -``` - -更多api和相应的数据,可查看代码: -[*data schema*](https://github.com/zvtvz/zvt/tree/master/zvt/domain) -[*data api*](https://github.com/zvtvz/zvt/tree/master/zvt/api) - -filters参数的使用请参考[*sqlalchemy*](https://docs.sqlalchemy.org/en/13/orm/query.html),SQL能做的查询都能做 - -## SQL查询 -你也可以直接使用项目中的sqlite数据库,利用你熟悉的工具,语言来进行研究 - -比如:查看某段时间整个市场的高管增持减持 -``` -select * from manager_trading where volume < 0 and timestamp > '2018-01-01'; -select count(id) from manager_trading where volume < 0 and timestamp > '2018-01-01'; - -select * from manager_trading where volume > 0 and timestamp > '2018-01-01'; -select count(id) from manager_trading where volume > 0 and timestamp > '2018-01-01' -``` -

- -库都给你了,SQL大神,请开始你的表演 \ No newline at end of file diff --git a/docs/en/design-philosophy.md b/docs/en/design-philosophy.md deleted file mode 100644 index 53f185a1..00000000 --- a/docs/en/design-philosophy.md +++ /dev/null @@ -1,24 +0,0 @@ -## 统一性(Unity) -统一性使你能够愉快的思考 - -比如投资标的的唯一标识,zvt里面定义如下 -``` -{entity_type}_{exchange}_{code} -``` -很自然的,你就知道stock_sz_000338,coin_binance_EOS/USDT代表什么. - -比如Recorder,其对所有标的的记录提供了统一的抽象. -比如get_kdata,其对所有的标的的使用方式都是一致的. -比如TechnicalFactor,其对所有标的,所有级别的操作都是一致的. -比如回测和实时交易,Trader提供了一致的处理方式. - -## 分层(Layer) - -分层的核心在于不同层次之间的协议,整个系统的稳定性在于协议的稳定性. -

- -## 扩展性(Scalable) - -- 很容易的在系统中添加数据,并自动获得其他模块的能力. -- 很容易实现自己的factor -- 很容易的扩展自己的trader \ No newline at end of file diff --git a/docs/en/factor_usage.md b/docs/en/factor_usage.md deleted file mode 100644 index d178677c..00000000 --- a/docs/en/factor_usage.md +++ /dev/null @@ -1,109 +0,0 @@ -## 什么是factor - -一般来说,人们常把macd,kdj,roe,pe等当作指标,zvt中把他们叫做indicator,甚至什么也不是,就只是普通的value;factor在zvt中有更高层次的抽象.这个会在如何扩展factor里面会做详细说明,你现在只需要知道,zvt里面的factor分为三类:FilterFactor,ScoreFacor,StateFactor. - -## TechnicalFactor - -> 该factor为计算各种技术指标的算法模板类,基于它可以构造出用于选股和交易的Factor - -``` - factor = TechnicalFactor(codes=['000338'], start_timestamp='2018-01-01', end_timestamp='2019-02-01', - indicators=['ma', 'ma'], - indicators_param=[{'window': 5}, {'window': 10}]) - factor.draw_with_indicators() -``` - -factor本身是可以draw的,并且可以在notebook中使用: -

- -你甚至可以把所有标的的factor都draw成一致的图片,然后用机器学习来对其进行分析,需要做的也只是把codes遍历一遍. - -## 技术买卖指标 -基于TechnicalFactor你可以构造自己的FilterFactor,比如,均线交叉买卖: -``` -class CrossMaFactor(TechnicalFactor): - def __init__(self, - entity_ids: List[str] = None, - entity_type: Union[str, SecurityType] = SecurityType.stock, - exchanges: List[str] = ['sh', 'sz'], - codes: List[str] = None, - the_timestamp: Union[str, pd.Timestamp] = None, - start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = None, - columns: List = None, filters: List = None, - provider: Union[str, Provider] = 'netease', - level: IntervalLevel = IntervalLevel.LEVEL_1DAY, - - - category_field: str = 'security_id', - # child added arguments - short_window=5, - long_window=10) -> None: - self.short_window = short_window - self.long_window = long_window - - super().__init__(entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, - columns, filters, provider, level, category_field, - indicators=['ma', 'ma'], - indicators_param=[{'window': short_window}, {'window': long_window}], valid_window=long_window) - - def compute(self): - super().compute() - s = self.pipe_df['ma{}'.format(self.short_window)] > self.pipe_df['ma{}'.format(self.long_window)] - self.result_df = s.to_frame(name='score') - - def on_category_data_added(self, category, added_data: pd.DataFrame): - super().on_category_data_added(category, added_data) - # TODO:improve it to just computing the added data - self.compute() -``` - -其本身也是可以draw的: -``` -cross = CrossMaFactor(codes=['000338'], start_timestamp='2018-01-01', end_timestamp='2019-02-01',provider='joinquant') -cross.draw_result(render='notebook') -``` -

- -## ScoreFactor - -[**ScoreFactor**](https://github.com/zvtvz/zvt/blob/master/zvt/factors/factor.py#L138)内置了分位数算法(quantile),你可以非常方便的对其进行扩展. - -下面展示一个例子:对个股的**营收,利润增速,资金,净资产收益率**进行评分 -``` -class FinanceGrowthFactor(ScoreFactor): - - def __init__(self, - entity_ids: List[str] = None, - entity_type: Union[str, SecurityType] = SecurityType.stock, - exchanges: List[str] = ['sh', 'sz'], - codes: List[str] = None, - the_timestamp: Union[str, pd.Timestamp] = None, - start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = None, - columns: List = [FinanceFactor.op_income_growth_yoy, FinanceFactor.net_profit_growth_yoy, - FinanceFactor.rota, - FinanceFactor.roe], - filters: List = None, - provider: Union[str, Provider] = 'eastmoney', - level: IntervalLevel = IntervalLevel.LEVEL_1DAY, - - - category_field: str = 'security_id', - keep_all_timestamp: bool = True, - fill_method: str = 'ffill', - effective_number: int = None, - depth_computing_method='ma', - depth_computing_param={'window': '365D', 'on': 'timestamp'}, - breadth_computing_method='quantile', - breadth_computing_param={'score_levels': [0.1, 0.3, 0.5, 0.7, 0.9]}) -> None: - super().__init__(FinanceFactor, entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, - end_timestamp, columns, filters, provider, level, category_field, - keep_all_timestamp, fill_method, effective_number, depth_computing_method, - depth_computing_param, breadth_computing_method, breadth_computing_param) - -``` - -## StateFactor - -> 可在该类Factor中实现趋势,震荡,缠论笔,段,中枢之类的状态机 \ No newline at end of file diff --git a/docs/en/quickstart.md b/docs/en/quickstart.md deleted file mode 100644 index 8027c876..00000000 --- a/docs/en/quickstart.md +++ /dev/null @@ -1,94 +0,0 @@ -## install - -> Since the project can be customized and expanded in many cases, the pip installing is only suitable for demonstration, please clone the code directly and refer to related documents. 
- -``` -pip install zvt -``` -upgrade -``` -pip install -U zvt -``` - -clone the code(**recommended way**) - -``` -git clone https://github.com/zvtvz/zvt.git -``` -**assume that you operate in the project root directory** -**** - -## setup env - -- python>=3.6(recommended using virtualenv) - -- install requirements -``` -pip install -r requirements.txt -``` - -**assume the env is ready** - -## init sample data -``` -python init_data_sample.py -``` - -The script decompresses the data sample which the tests and examples in the project depend on. - -please change the DATA_PATH,if you're using the full data coming from the recorders - -``` -# please change the path to your real store path -DATA_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'datasample')) - -``` - - -for how to use and extend the data, please refer to [data usage](./data_usage.md) - -## run the main interface -``` -python main.py -``` -you could run zvt/main.py in pycharm directly - - -open your browser:[127.0.0.1:8050](http://127.0.0.1:8050) - -## run the strategies -[trader examples](https://github.com/zvtvz/zvt/tree/master/examples/trader) - -set the factors for your trader,e.g,cross ma -``` -class MyMaTrader(StockTrader): - def init_selectors(self, entity_ids, entity_type, exchanges, codes, start_timestamp, end_timestamp): - myselector = TargetSelector(entity_ids=entity_ids, entity_type=entity_type, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp) - - myselector.add_filter_factor( - CrossMaFactor(entity_ids=entity_ids, entity_type=entity_type, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp)) - - self.selectors.append(myselector) - -``` - -set the targets as many as you want -``` - # single stock with cross ma factor - MyMaTrader(codes=['000338'], level=IntervalLevel.LEVEL_1DAY, start_timestamp='2018-01-01', - end_timestamp='2019-06-30', trader_name='000338_ma_trader').run() - - # 
multiple stocks with bull factor - MyBullTrader(codes=SAMPLE_STOCK_CODES, level=IntervalLevel.LEVEL_1DAY, start_timestamp='2018-01-01', - end_timestamp='2019-06-30', trader_name='sample_stocks_bull_trader').run() -``` - - -open the running interface from the browser,check the results -

- -The above is the market value curve of the strategy. The following is the k-line diagram of the targetS and trading signal mark, as well as the factor used in the strategy. - -**You can see the factor and trading signals used in the strategy, so you can judge the accuracy and performance of the strategy very intuitively.** \ No newline at end of file diff --git a/docs/en/selector_usage.md b/docs/en/selector_usage.md deleted file mode 100644 index a77e6de4..00000000 --- a/docs/en/selector_usage.md +++ /dev/null @@ -1,63 +0,0 @@ -## what's selector - -selector是根据factor来对标的进行综合评价,生成**开多,开空,持多,持空**标的选择器,其比一般的所谓**选股功能**要强大得多. - -## 构造自己的selector - -### TechnicalSelector - -``` -class TechnicalSelector(TargetSelector): - def __init__(self, entity_ids=None, entity_type=SecurityType.stock, exchanges=['sh', 'sz'], codes=None, - the_timestamp=None, start_timestamp=None, end_timestamp=None, long_threshold=0.8, short_threshold=-0.8, - level=IntervalLevel.LEVEL_1DAY, - provider='joinquant') -> None: - super().__init__(entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, - long_threshold, short_threshold, level, provider) - - def init_factors(self, entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, - end_timestamp): - bull_factor = BullFactor(entity_ids=entity_ids, entity_type=entity_type, exchanges=exchanges, - codes=codes, the_timestamp=the_timestamp, start_timestamp=start_timestamp, - end_timestamp=end_timestamp, provider=self.provider, level=self.level) - - self.filter_factors = [bull_factor] - -s = TechnicalSelector(codes=SAMPLE_STOCK_CODES, start_timestamp='2018-01-01', end_timestamp='2019-06-30') -s.run() -s.draw() -``` - -该selector选取macd黄白线在0轴上的标的,选取完成后,可以运行draw来获取选中标的的表格. - -

- - -### FundamentalSelector -``` -from zvt.factors.finance_factor import FinanceGrowthFactor -from zvt.selectors.selector import TargetSelector - - -class FundamentalSelector(TargetSelector): - def init_factors(self, entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, - end_timestamp): - factor = FinanceGrowthFactor(entity_ids=entity_ids, entity_type=entity_type, exchanges=exchanges, - codes=codes, the_timestamp=the_timestamp, start_timestamp=start_timestamp, - end_timestamp=end_timestamp, keep_all_timestamp=True, provider=self.provider) - self.score_factors.append(factor) - -selector: TargetSelector = FundamentalSelector(start_timestamp='2018-01-01', end_timestamp='2019-06-30') -selector.run() -selector.draw() -``` - -该selector选取成长性评分在0.8以上的个股. - -

- -> 注意:使用到的ScoreFactor是对全市场和历史数据进行运算,比较耗时,但运算结果可以直接进行回测. - -### 多factor运算 - -只需要在init_factors里面继续添加factor即可. \ No newline at end of file diff --git a/docs/factor.md b/docs/factor.md deleted file mode 100644 index d178677c..00000000 --- a/docs/factor.md +++ /dev/null @@ -1,109 +0,0 @@ -## 什么是factor - -一般来说,人们常把macd,kdj,roe,pe等当作指标,zvt中把他们叫做indicator,甚至什么也不是,就只是普通的value;factor在zvt中有更高层次的抽象.这个会在如何扩展factor里面会做详细说明,你现在只需要知道,zvt里面的factor分为三类:FilterFactor,ScoreFacor,StateFactor. - -## TechnicalFactor - -> 该factor为计算各种技术指标的算法模板类,基于它可以构造出用于选股和交易的Factor - -``` - factor = TechnicalFactor(codes=['000338'], start_timestamp='2018-01-01', end_timestamp='2019-02-01', - indicators=['ma', 'ma'], - indicators_param=[{'window': 5}, {'window': 10}]) - factor.draw_with_indicators() -``` - -factor本身是可以draw的,并且可以在notebook中使用: -

- -你甚至可以把所有标的的factor都draw成一致的图片,然后用机器学习来对其进行分析,需要做的也只是把codes遍历一遍. - -## 技术买卖指标 -基于TechnicalFactor你可以构造自己的FilterFactor,比如,均线交叉买卖: -``` -class CrossMaFactor(TechnicalFactor): - def __init__(self, - entity_ids: List[str] = None, - entity_type: Union[str, SecurityType] = SecurityType.stock, - exchanges: List[str] = ['sh', 'sz'], - codes: List[str] = None, - the_timestamp: Union[str, pd.Timestamp] = None, - start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = None, - columns: List = None, filters: List = None, - provider: Union[str, Provider] = 'netease', - level: IntervalLevel = IntervalLevel.LEVEL_1DAY, - - - category_field: str = 'security_id', - # child added arguments - short_window=5, - long_window=10) -> None: - self.short_window = short_window - self.long_window = long_window - - super().__init__(entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, - columns, filters, provider, level, category_field, - indicators=['ma', 'ma'], - indicators_param=[{'window': short_window}, {'window': long_window}], valid_window=long_window) - - def compute(self): - super().compute() - s = self.pipe_df['ma{}'.format(self.short_window)] > self.pipe_df['ma{}'.format(self.long_window)] - self.result_df = s.to_frame(name='score') - - def on_category_data_added(self, category, added_data: pd.DataFrame): - super().on_category_data_added(category, added_data) - # TODO:improve it to just computing the added data - self.compute() -``` - -其本身也是可以draw的: -``` -cross = CrossMaFactor(codes=['000338'], start_timestamp='2018-01-01', end_timestamp='2019-02-01',provider='joinquant') -cross.draw_result(render='notebook') -``` -

- -## ScoreFactor - -[**ScoreFactor**](https://github.com/zvtvz/zvt/blob/master/zvt/factors/factor.py#L138)内置了分位数算法(quantile),你可以非常方便的对其进行扩展. - -下面展示一个例子:对个股的**营收,利润增速,资金,净资产收益率**进行评分 -``` -class FinanceGrowthFactor(ScoreFactor): - - def __init__(self, - entity_ids: List[str] = None, - entity_type: Union[str, SecurityType] = SecurityType.stock, - exchanges: List[str] = ['sh', 'sz'], - codes: List[str] = None, - the_timestamp: Union[str, pd.Timestamp] = None, - start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = None, - columns: List = [FinanceFactor.op_income_growth_yoy, FinanceFactor.net_profit_growth_yoy, - FinanceFactor.rota, - FinanceFactor.roe], - filters: List = None, - provider: Union[str, Provider] = 'eastmoney', - level: IntervalLevel = IntervalLevel.LEVEL_1DAY, - - - category_field: str = 'security_id', - keep_all_timestamp: bool = True, - fill_method: str = 'ffill', - effective_number: int = None, - depth_computing_method='ma', - depth_computing_param={'window': '365D', 'on': 'timestamp'}, - breadth_computing_method='quantile', - breadth_computing_param={'score_levels': [0.1, 0.3, 0.5, 0.7, 0.9]}) -> None: - super().__init__(FinanceFactor, entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, - end_timestamp, columns, filters, provider, level, category_field, - keep_all_timestamp, fill_method, effective_number, depth_computing_method, - depth_computing_param, breadth_computing_method, breadth_computing_param) - -``` - -## StateFactor - -> 可在该类Factor中实现趋势,震荡,缠论笔,段,中枢之类的状态机 \ No newline at end of file diff --git a/docs/favicon.ico b/docs/favicon.ico deleted file mode 100644 index e892b0e5..00000000 Binary files a/docs/favicon.ico and /dev/null differ diff --git a/docs/imgs/alipay.png b/docs/imgs/alipay.png deleted file mode 100644 index fc31fb03..00000000 Binary files a/docs/imgs/alipay.png and /dev/null differ diff --git a/docs/imgs/architecture.png b/docs/imgs/architecture.png deleted file 
mode 100644 index ccdfe9b8..00000000 Binary files a/docs/imgs/architecture.png and /dev/null differ diff --git a/docs/imgs/big-picture.jpg b/docs/imgs/big-picture.jpg new file mode 100644 index 00000000..aae26649 Binary files /dev/null and b/docs/imgs/big-picture.jpg differ diff --git a/docs/imgs/data-usage/balance-sheet.gif b/docs/imgs/data-usage/balance-sheet.gif deleted file mode 100644 index 3019301a..00000000 Binary files a/docs/imgs/data-usage/balance-sheet.gif and /dev/null differ diff --git a/docs/imgs/data-usage/compare-dividend.gif b/docs/imgs/data-usage/compare-dividend.gif deleted file mode 100644 index 9aba8779..00000000 Binary files a/docs/imgs/data-usage/compare-dividend.gif and /dev/null differ diff --git a/docs/imgs/data-usage/compare-pe.gif b/docs/imgs/data-usage/compare-pe.gif deleted file mode 100644 index 0f0d413d..00000000 Binary files a/docs/imgs/data-usage/compare-pe.gif and /dev/null differ diff --git a/docs/imgs/data-usage/cross-market-trading.gif b/docs/imgs/data-usage/cross-market-trading.gif deleted file mode 100644 index 60655ab8..00000000 Binary files a/docs/imgs/data-usage/cross-market-trading.gif and /dev/null differ diff --git a/docs/imgs/data-usage/filter-entity.gif b/docs/imgs/data-usage/filter-entity.gif deleted file mode 100644 index afa21dfc..00000000 Binary files a/docs/imgs/data-usage/filter-entity.gif and /dev/null differ diff --git a/docs/imgs/data-usage/margin-trending.gif b/docs/imgs/data-usage/margin-trending.gif deleted file mode 100644 index dc13b668..00000000 Binary files a/docs/imgs/data-usage/margin-trending.gif and /dev/null differ diff --git a/docs/imgs/data-usage/money-flow.gif b/docs/imgs/data-usage/money-flow.gif deleted file mode 100644 index 5e5ac0a9..00000000 Binary files a/docs/imgs/data-usage/money-flow.gif and /dev/null differ diff --git a/docs/imgs/data-usage/sh-pe-distribution.gif b/docs/imgs/data-usage/sh-pe-distribution.gif deleted file mode 100644 index cd4fdf47..00000000 Binary files 
a/docs/imgs/data-usage/sh-pe-distribution.gif and /dev/null differ diff --git a/docs/imgs/data-usage/stock-price-distribution.gif b/docs/imgs/data-usage/stock-price-distribution.gif deleted file mode 100644 index 99544311..00000000 Binary files a/docs/imgs/data-usage/stock-price-distribution.gif and /dev/null differ diff --git a/docs/imgs/data-usage/tech-factor.gif b/docs/imgs/data-usage/tech-factor.gif deleted file mode 100644 index 5eb0886d..00000000 Binary files a/docs/imgs/data-usage/tech-factor.gif and /dev/null differ diff --git a/docs/imgs/data_structure.png b/docs/imgs/data_structure.png deleted file mode 100644 index be121abe..00000000 Binary files a/docs/imgs/data_structure.png and /dev/null differ diff --git a/docs/imgs/data_structure_physics.png b/docs/imgs/data_structure_physics.png deleted file mode 100644 index 1b5f69e6..00000000 Binary files a/docs/imgs/data_structure_physics.png and /dev/null differ diff --git a/docs/imgs/factor-in-notebook.gif b/docs/imgs/factor-in-notebook.gif deleted file mode 100644 index 80bb105a..00000000 Binary files a/docs/imgs/factor-in-notebook.gif and /dev/null differ diff --git a/docs/imgs/factor-result-in-notebook.gif b/docs/imgs/factor-result-in-notebook.gif deleted file mode 100644 index 227ee78b..00000000 Binary files a/docs/imgs/factor-result-in-notebook.gif and /dev/null differ diff --git a/docs/imgs/filter_factor.png b/docs/imgs/filter_factor.png deleted file mode 100644 index 2e269cd0..00000000 Binary files a/docs/imgs/filter_factor.png and /dev/null differ diff --git a/docs/imgs/fundamental-selector-in-notebook.png b/docs/imgs/fundamental-selector-in-notebook.png deleted file mode 100644 index 6d8a60f1..00000000 Binary files a/docs/imgs/fundamental-selector-in-notebook.png and /dev/null differ diff --git a/docs/imgs/multiple-stock-macd.gif b/docs/imgs/multiple-stock-macd.gif deleted file mode 100644 index ef2da823..00000000 Binary files a/docs/imgs/multiple-stock-macd.gif and /dev/null differ diff --git 
a/docs/imgs/output-value.jpg b/docs/imgs/output-value.jpg deleted file mode 100644 index a3ec4b39..00000000 Binary files a/docs/imgs/output-value.jpg and /dev/null differ diff --git a/docs/imgs/pred_close.png b/docs/imgs/pred_close.png new file mode 100644 index 00000000..268a44ef Binary files /dev/null and b/docs/imgs/pred_close.png differ diff --git a/docs/imgs/realtime_signals.gif b/docs/imgs/realtime_signals.gif deleted file mode 100644 index b0b44e9c..00000000 Binary files a/docs/imgs/realtime_signals.gif and /dev/null differ diff --git a/docs/imgs/result_df.png b/docs/imgs/result_df.png new file mode 100644 index 00000000..18842c63 Binary files /dev/null and b/docs/imgs/result_df.png differ diff --git a/docs/imgs/score_factor.png b/docs/imgs/score_factor.png deleted file mode 100644 index 32e88633..00000000 Binary files a/docs/imgs/score_factor.png and /dev/null differ diff --git a/docs/imgs/show-trader.gif b/docs/imgs/show-trader.gif deleted file mode 100644 index 198fe4aa..00000000 Binary files a/docs/imgs/show-trader.gif and /dev/null differ diff --git a/docs/imgs/single-stock-cross-ma.gif b/docs/imgs/single-stock-cross-ma.gif deleted file mode 100644 index 5beb4885..00000000 Binary files a/docs/imgs/single-stock-cross-ma.gif and /dev/null differ diff --git a/docs/imgs/sql-usage.gif b/docs/imgs/sql-usage.gif deleted file mode 100644 index 48d3e389..00000000 Binary files a/docs/imgs/sql-usage.gif and /dev/null differ diff --git a/docs/imgs/stock-kaihu.jpeg b/docs/imgs/stock-kaihu.jpeg deleted file mode 100644 index db065148..00000000 Binary files a/docs/imgs/stock-kaihu.jpeg and /dev/null differ diff --git a/docs/imgs/technical-selector-in-notebook.gif b/docs/imgs/technical-selector-in-notebook.gif deleted file mode 100644 index e8d1292f..00000000 Binary files a/docs/imgs/technical-selector-in-notebook.gif and /dev/null differ diff --git a/docs/imgs/trader_list_view.gif b/docs/imgs/trader_list_view.gif deleted file mode 100644 index c199ce82..00000000 
Binary files a/docs/imgs/trader_list_view.gif and /dev/null differ diff --git a/docs/imgs/view.png b/docs/imgs/view.png new file mode 100644 index 00000000..711d1839 Binary files /dev/null and b/docs/imgs/view.png differ diff --git a/docs/imgs/wechat.jpeg b/docs/imgs/wechat.jpeg deleted file mode 100644 index bd4d8e88..00000000 Binary files a/docs/imgs/wechat.jpeg and /dev/null differ diff --git a/docs/imgs/wechat.png b/docs/imgs/wechat.png deleted file mode 100644 index 3d7d8116..00000000 Binary files a/docs/imgs/wechat.png and /dev/null differ diff --git a/docs/index.html b/docs/index.html deleted file mode 100644 index dbf4aeba..00000000 --- a/docs/index.html +++ /dev/null @@ -1,30 +0,0 @@ - - - - - ZVT - - - - - - - -
- - - - - - - \ No newline at end of file diff --git a/docs/intro.md b/docs/intro.md deleted file mode 100644 index a1ecda6e..00000000 --- a/docs/intro.md +++ /dev/null @@ -1,45 +0,0 @@ -[![github](https://img.shields.io/github/stars/zvtvz/zvt.svg)](https://github.com/zvtvz/zvt) -[![image](https://img.shields.io/pypi/v/zvt.svg)](https://pypi.org/project/zvt/) -[![image](https://img.shields.io/pypi/l/zvt.svg)](https://pypi.org/project/zvt/) -[![image](https://img.shields.io/pypi/pyversions/zvt.svg)](https://pypi.org/project/zvt/) -[![Build Status](https://api.travis-ci.org/zvtvz/zvt.svg?branch=master)](https://travis-ci.org/zvtvz/zvt) -[![codecov.io](https://codecov.io/github/zvtvz/zvt/coverage.svg?branch=master)](https://codecov.io/github/zvtvz/zvt) -[![HitCount](http://hits.dwyl.io/zvtvz/zvt.svg)](http://hits.dwyl.io/zvtvz/zvt) - -ZVT是在[fooltrader](https://github.com/foolcage/fooltrader)的基础上重新思考后编写的量化项目,其包含可扩展的数据recorder,api,因子计算,选股,回测,交易,以及统一的可视化,定位为**中低频** **多级别** **多因子** **多标的** 全市场分析和交易框架。 - -相比其他的量化系统,其不依赖任何中间件,**非常轻,可测试,可推断,可扩展**。 - -编写该系统的初心: -* 构造一个中立标准的数据schema -* 能够容易地把各provider的数据适配到系统 -* 相同的算法,只写一次,可以应用到任何市场 -* 适用于低耗能的人脑+个人电脑 - -## ✨特性 -- **丰富全面开箱即用可扩展可持续增量更新的数据** - - A股数据:行情,财务报表,大股东行为,高管交易,分红融资详情,个股板块资金流向,融资融券,龙虎榜等数据 - - 市场整体pe,pb,资金流,融资融券,外资动向等数据 - - 数字货币数据 -- 数据的标准化,多数据源(provider)交叉验证,补全 -- **简洁可扩展的数据框架** -- **统一简洁的API,支持sql查询,支持pandas** -- 可扩展的factor,对单标的和多标的的运算抽象了一种统一的计算方式 -- **支持多标的,多factor,多级别的回测方式** -- 支持交易信号和策略使用到的factor的实时可视化 -- 支持多种实盘交易(实现中) - -## 💯关于该文档 - -该文档由[项目docs](https://github.com/zvtvz/zvt/tree/master/docs)自动生成,主要内容为项目的**基本用法**和**整体设计**,你应该仔细地阅读所有的章节。 - -现实中的使用例子会在公众号和知乎专栏中长期更新,其假设你已经读过此文档。 - ------- -微信公众号: - -Wechat - -知乎专栏: - -https://zhuanlan.zhihu.com/automoney \ No newline at end of file diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..6fcf05b4 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + 
set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/quick-start.md b/docs/quick-start.md deleted file mode 100644 index e2a56e71..00000000 --- a/docs/quick-start.md +++ /dev/null @@ -1,201 +0,0 @@ -## 1. 🔖5分钟用起来 - ->一个系统,如果5分钟用不起来,那肯定是设计软件的人本身就没想清楚,并且其压根就没打算自己用。 - -### 1.1 安装 - -假设你已经在>=python3.6的环境中(建议新建一个干净的virtual env环境) -``` -pip3 install zvt -i http://pypi.douban.com/simple --trusted-host pypi.douban.com - -pip3 show zvt -``` - -如果不是最新版本 -``` -pip install --upgrade zvt -i http://pypi.douban.com/simple --trusted-host pypi.douban.com -``` - -> 请根据需要决定是否使用豆瓣镜像源 - - -### 1.2 进入ipython,体验一把 -``` -In [1]: import os - -#这一句会进入测试环境,使用自带的测试数据 -In [2]: os.environ["TESTING_ZVT"] = "1" - -In [3]: from zvt import * -{'data_path': '/Users/xuanqi/zvt-test-home/data', - 'domain_module': 'zvt.domain', - 'email_password': '', - 'email_username': '', - 'http_proxy': '127.0.0.1:1087', - 'https_proxy': '127.0.0.1:1087', - 'jq_password': '', - 'jq_username': '', - 'log_path': '/Users/xuanqi/zvt-test-home/logs', - 'smtp_host': 'smtpdm.aliyun.com', - 'smtp_port': '80', - 'ui_path': '/Users/xuanqi/zvt-test-home/ui', - 'wechat_app_id': '', - 'wechat_app_secrect': '', - 'zvt_home': '/Users/xuanqi/zvt-test-home'} -In [5]: from zvt.api import * - -In [6]: df = get_kdata(entity_id='stock_sz_000338',provider='joinquant') - -n [8]: 
df.tail() -Out[8]: - id entity_id timestamp provider code name level open close high low volume turnover change_pct turnover_rate -timestamp -2019-10-29 stock_sz_000338_2019-10-29 stock_sz_000338 2019-10-29 joinquant 000338 潍柴动力 1d 12.00 11.78 12.02 11.76 28533132.0 3.381845e+08 None None -2019-10-30 stock_sz_000338_2019-10-30 stock_sz_000338 2019-10-30 joinquant 000338 潍柴动力 1d 11.74 12.05 12.08 11.61 42652561.0 5.066013e+08 None None -2019-10-31 stock_sz_000338_2019-10-31 stock_sz_000338 2019-10-31 joinquant 000338 潍柴动力 1d 12.05 11.56 12.08 11.50 77329380.0 9.010439e+08 None None -2019-11-01 stock_sz_000338_2019-11-01 stock_sz_000338 2019-11-01 joinquant 000338 潍柴动力 1d 11.55 12.69 12.70 11.52 160732771.0 1.974125e+09 None None -2019-11-04 stock_sz_000338_2019-11-04 stock_sz_000338 2019-11-04 joinquant 000338 潍柴动力 1d 12.77 13.00 13.11 12.77 126673139.0 1.643788e+09 None None -``` - -### 1.3 财务数据 -``` -In [12]: from zvt.domain import * -In [13]: df = get_finance_factor(entity_id='stock_sz_000338',columns=FinanceFactor.important_cols()) - -In [14]: df.tail() -Out[14]: - basic_eps total_op_income net_profit op_income_growth_yoy net_profit_growth_yoy roe rota gross_profit_margin net_margin timestamp -timestamp -2018-10-31 0.73 1.182000e+11 6.001000e+09 0.0595 0.3037 0.1647 0.0414 0.2164 0.0681 2018-10-31 -2019-03-26 1.08 1.593000e+11 8.658000e+09 0.0507 0.2716 0.2273 0.0589 0.2233 0.0730 2019-03-26 -2019-04-29 0.33 4.521000e+10 2.591000e+09 0.1530 0.3499 0.0637 0.0160 0.2166 0.0746 2019-04-29 -2019-08-30 0.67 9.086000e+10 5.287000e+09 0.1045 0.2037 0.1249 0.0315 0.2175 0.0759 2019-08-30 -2019-10-31 0.89 1.267000e+11 7.058000e+09 0.0721 0.1761 0.1720 0.0435 0.2206 0.0736 2019-10-31 - -``` - -### 1.4 跑个策略 -``` -In [15]: from zvt.samples import * -In [16]: t = MyMaTrader(codes=['000338'], level=IntervalLevel.LEVEL_1DAY, start_timestamp='2018-01-01', - ...: end_timestamp='2019-06-30', trader_name='000338_ma_trader') -In [17]: t.run() - -``` -测试数据里面包含的SAMPLE_STOCK_CODES = 
['000001', '000783', '000778', '603220', '601318', '000338', '002572', '300027'],试一下传入其任意组合,即可看多标的的效果。 - -

- -## 2. 📝正式环境 -项目支持多环境切换,默认情况下,不设置环境变量TESTING_ZVT即为正式环境 - ``` -In [1]: from zvt import * -{'data_path': '/Users/xuanqi/zvt-home/data', - 'domain_module': 'zvt.domain', - 'email_password': '', - 'email_username': '', - 'http_proxy': '127.0.0.1:1087', - 'https_proxy': '127.0.0.1:1087', - 'jq_password': '', - 'jq_username': '', - 'log_path': '/Users/xuanqi/zvt-home/logs', - 'smtp_host': 'smtpdm.aliyun.com', - 'smtp_port': '80', - 'ui_path': '/Users/xuanqi/zvt-home/ui', - 'wechat_app_id': '', - 'wechat_app_secrect': '', - 'zvt_home': '/Users/xuanqi/zvt-home'} - ``` - ->如果你不想使用使用默认的zvt_home目录,请设置环境变量ZVT_HOME再运行。 - -所有操作跟测试环境是一致的,只是操作的目录不同。 - -### 2.1 下载历史数据(可选) -东财数据: https://pan.baidu.com/s/1CMAlCRYwlhGVxS6drYUEgA 提取码: q2qn -资金流,板块数据(新浪): https://pan.baidu.com/s/1eusW65sdK_WE4icnt8JS1g 提取码: uux3 -市场概况,沪/深港通,融资融券数据(聚宽): https://pan.baidu.com/s/1ijrgjUd1WkRMONrwRQU-4w 提取码: dipd - -把下载的数据解压到正式环境的data_path(所有db文件放到该目录下,没有层级结构) - -数据的更新是增量的,下载历史数据只是为了节省时间,全部自己更新也是可以的。 - -### 2.2 注册聚宽(可选) -项目数据支持多provider,在数据schema一致性的基础上,可根据需要进行选择和扩展,目前支持新浪,东财,网易,交易所,ccxt等免费数据。 - -#### 数据的设计上是让provider来适配schema,而不是反过来,这样即使某provider不可用了,换一个即可,不会影响整个系统的使用。 - -但免费数据的缺点是显而易见的:不稳定,爬取清洗数据耗时耗力,维护代价巨大,且随时可能不可用。 -个人建议:如果只是学习研究,可以使用免费数据;如果是真正有意投身量化,还是选一家可靠的数据提供商。 - -项目支持聚宽的数据,可戳以下链接申请使用(目前可免费使用一年) -https://www.joinquant.com/default/index/sdk?channelId=953cbf5d1b8683f81f0c40c9d4265c0d - -> 项目中大部分的免费数据目前都是比较稳定的,且做过严格测试,特别是东财的数据,可放心使用 - -> 添加其他数据提供商,请参考[数据扩展教程](http://zvt.foolcage.com/#/data_extending) - - -### 2.3 配置 -在zvt_home目录中找到config.json进行配置: - - * jq_username - -聚宽数据用户名 - - * jq_password - -聚宽数据密码 - -> TODO:其他配置项用法 - -### 2.4 更新数据 - -``` - -In [1]: from zvt.domain import * -In [2]: global_schemas -[zvt.domain.dividend_financing.DividendFinancing, - zvt.domain.dividend_financing.DividendDetail, - zvt.domain.dividend_financing.SpoDetail...] 
-``` -整个系统的schema和其对应的recorders采取自注册的方式,global_schemas为系统支持的schema,而其对应的recorder以及如何更新数据,方法如下: -``` -In [17]: FinanceFactor.recorders -Out[17]: [zvt.recorders.eastmoney.finance.china_stock_finance_factor_recorder.ChinaStockFinanceFactorRecorder] - -In [18]: FinanceFactor.record_data(codes=['000338']) -FinanceFactor registered recorders:[] -auth success ( 如需说明文档请查看:https://url.cn/5oB7EOO,更多问题请联系JQData管理员,微信号:JQData02 ) -INFO MainThread 2019-12-15 18:03:35,493 ChinaStockFinanceFactorRecorder:recorder.py:551 evaluate_start_end_size_timestamps entity_id:stock_sz_000338,timestamps start:2002-12-31 00:00:00,end:2019-09-30 00:00:00 -INFO MainThread 2019-12-15 18:03:35,509 ChinaStockFinanceFactorRecorder:recorder.py:556 evaluate_start_end_size_timestamps latest record timestamp:2019-10-31 00:00:00 -INFO MainThread 2019-12-15 18:03:35,510 ChinaStockFinanceFactorRecorder:recorder.py:348 run entity_id:stock_sz_000338,evaluate_start_end_size_timestamps result:None,None,0,None -INFO MainThread 2019-12-15 18:03:35,510 ChinaStockFinanceFactorRecorder:recorder.py:357 run finish recording for entity_id:stock_sz_000338,latest_timestamp:None -已退出 -``` -* codes代表需要抓取的股票代码 -* 不传入codes则是全市场抓取 -* 所有的schema对应的数据更新,方法是一致的 - -定时任务的方式更新可参考[runners](https://github.com/zvtvz/zvt/blob/master/examples/recorders) - -## 3. 🚀开发 - -### 3.1 clone代码 - -``` -git clone https://github.com/zvtvz/zvt.git -``` - -设置项目的virtual env(python>=3.6),安装依赖 -``` -pip3 install -r requirements.txt -pip3 install pytest -``` - -### 3.2 测试案例 -pycharm导入工程(推荐,你也可以使用其他ide),然后pytest跑测试案例 - -

- -大部分功能使用都可以从tests里面参考 \ No newline at end of file diff --git a/docs/recorder.png b/docs/recorder.png deleted file mode 100644 index 7af91b47..00000000 Binary files a/docs/recorder.png and /dev/null differ diff --git a/docs/selector_usage.md b/docs/selector_usage.md deleted file mode 100644 index aa744ecb..00000000 --- a/docs/selector_usage.md +++ /dev/null @@ -1,63 +0,0 @@ -## 什么是selector - -selector是根据factor来对标的进行综合评价,生成**开多,开空,持多,持空**标的选择器,其比一般的所谓**选股功能**要强大得多. - -## 构造自己的selector - -### TechnicalSelector - -``` -class TechnicalSelector(TargetSelector): - def __init__(self, entity_ids=None, entity_type=SecurityType.stock, exchanges=['sh', 'sz'], codes=None, - the_timestamp=None, start_timestamp=None, end_timestamp=None, long_threshold=0.8, short_threshold=-0.8, - level=IntervalLevel.LEVEL_1DAY, - provider='joinquant') -> None: - super().__init__(entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, - long_threshold, short_threshold, level, provider) - - def init_factors(self, entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, - end_timestamp): - bull_factor = BullFactor(entity_ids=entity_ids, entity_type=entity_type, exchanges=exchanges, - codes=codes, the_timestamp=the_timestamp, start_timestamp=start_timestamp, - end_timestamp=end_timestamp, provider=self.provider, level=self.level) - - self.filter_factors = [bull_factor] - -s = TechnicalSelector(codes=SAMPLE_STOCK_CODES, start_timestamp='2018-01-01', end_timestamp='2019-06-30') -s.run() -s.draw() -``` - -该selector选取macd黄白线在0轴上的标的,选取完成后,可以运行draw来获取选中标的的表格. - -

- - -### FundamentalSelector -``` -from zvt.factors.finance_factor import FinanceGrowthFactor -from zvt.selectors.selector import TargetSelector - - -class FundamentalSelector(TargetSelector): - def init_factors(self, entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, - end_timestamp): - factor = FinanceGrowthFactor(entity_ids=entity_ids, entity_type=entity_type, exchanges=exchanges, - codes=codes, the_timestamp=the_timestamp, start_timestamp=start_timestamp, - end_timestamp=end_timestamp, keep_all_timestamp=True, provider=self.provider) - self.score_factors.append(factor) - -selector: TargetSelector = FundamentalSelector(start_timestamp='2018-01-01', end_timestamp='2019-06-30') -selector.run() -selector.draw() -``` - -该selector选取成长性评分在0.8以上的个股. - -

- -> 注意:使用到的ScoreFactor是对全市场和历史数据进行运算,比较耗时,但运算结果可以直接进行回测. - -### 多factor运算 - -只需要在init_factors里面继续添加factor即可. \ No newline at end of file diff --git a/docs/source/_static/accumulator.png b/docs/source/_static/accumulator.png new file mode 100644 index 00000000..b9bb03f3 Binary files /dev/null and b/docs/source/_static/accumulator.png differ diff --git a/docs/source/_static/bear.gif b/docs/source/_static/bear.gif new file mode 100644 index 00000000..30d1e675 Binary files /dev/null and b/docs/source/_static/bear.gif differ diff --git a/docs/source/_static/boll_factor.png b/docs/source/_static/boll_factor.png new file mode 100644 index 00000000..9358d325 Binary files /dev/null and b/docs/source/_static/boll_factor.png differ diff --git a/docs/source/_static/compare_cn_us.png b/docs/source/_static/compare_cn_us.png new file mode 100644 index 00000000..7b538462 Binary files /dev/null and b/docs/source/_static/compare_cn_us.png differ diff --git a/docs/source/_static/compare_yields.png b/docs/source/_static/compare_yields.png new file mode 100644 index 00000000..8d60b496 Binary files /dev/null and b/docs/source/_static/compare_yields.png differ diff --git a/docs/source/_static/data_structure.png b/docs/source/_static/data_structure.png new file mode 100644 index 00000000..1814970c Binary files /dev/null and b/docs/source/_static/data_structure.png differ diff --git a/docs/source/_static/factor_draw.png b/docs/source/_static/factor_draw.png new file mode 100644 index 00000000..8621f812 Binary files /dev/null and b/docs/source/_static/factor_draw.png differ diff --git a/docs/source/_static/factor_flow.png b/docs/source/_static/factor_flow.png new file mode 100644 index 00000000..566a9bc8 Binary files /dev/null and b/docs/source/_static/factor_flow.png differ diff --git a/docs/source/_static/factor_result.png b/docs/source/_static/factor_result.png new file mode 100644 index 00000000..0f718a80 Binary files /dev/null and b/docs/source/_static/factor_result.png differ diff 
--git a/docs/source/_static/normal_data.png b/docs/source/_static/normal_data.png new file mode 100644 index 00000000..05a2a07e Binary files /dev/null and b/docs/source/_static/normal_data.png differ diff --git a/docs/source/_static/stream.png b/docs/source/_static/stream.png new file mode 100644 index 00000000..ba99bcc7 Binary files /dev/null and b/docs/source/_static/stream.png differ diff --git a/docs/source/_static/tag.png b/docs/source/_static/tag.png new file mode 100644 index 00000000..03f39274 Binary files /dev/null and b/docs/source/_static/tag.png differ diff --git a/docs/source/_static/transformer.png b/docs/source/_static/transformer.png new file mode 100644 index 00000000..dbb510f4 Binary files /dev/null and b/docs/source/_static/transformer.png differ diff --git a/docs/source/_static/view.png b/docs/source/_static/view.png new file mode 100644 index 00000000..711d1839 Binary files /dev/null and b/docs/source/_static/view.png differ diff --git a/docs/source/_templates/custom-class-template.rst b/docs/source/_templates/custom-class-template.rst new file mode 100644 index 00000000..8cdfba6e --- /dev/null +++ b/docs/source/_templates/custom-class-template.rst @@ -0,0 +1,14 @@ +{{ objname | escape | underline}} + +.. currentmodule:: {{ module }} + +.. autoclass:: {{ objname }} + :members: + :show-inheritance: + :inherited-members: + + {% block methods %} + .. automethod:: __init__ + + + {% endblock %} diff --git a/docs/source/_templates/custom-module-template.rst b/docs/source/_templates/custom-module-template.rst new file mode 100644 index 00000000..f46f9f63 --- /dev/null +++ b/docs/source/_templates/custom-module-template.rst @@ -0,0 +1,66 @@ +{{ fullname | escape | underline}} + +.. automodule:: {{ fullname }} + + {% block attributes %} + {% if attributes %} + .. rubric:: {{ _('Module Attributes') }} + + .. 
autosummary:: + :toctree: + {% for item in attributes %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block functions %} + {% if functions %} + .. rubric:: {{ _('Functions') }} + + .. autosummary:: + :toctree: + {% for item in functions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block classes %} + {% if classes %} + .. rubric:: {{ _('Classes') }} + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + {% for item in classes %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block exceptions %} + {% if exceptions %} + .. rubric:: {{ _('Exceptions') }} + + .. autosummary:: + :toctree: + {% for item in exceptions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + +{% block modules %} +{% if modules %} +.. rubric:: Modules + +.. autosummary:: + :toctree: + :template: custom-module-template.rst + :recursive: +{% for item in modules %} + {{ item }} +{%- endfor %} +{% endif %} +{% endblock %} diff --git a/docs/source/_templates/sidebarlogo.html b/docs/source/_templates/sidebarlogo.html new file mode 100644 index 00000000..5c24b999 --- /dev/null +++ b/docs/source/_templates/sidebarlogo.html @@ -0,0 +1,4 @@ +

+ +

diff --git a/docs/source/api/index.rst b/docs/source/api/index.rst new file mode 100644 index 00000000..f0d06c4e --- /dev/null +++ b/docs/source/api/index.rst @@ -0,0 +1,29 @@ +API +=== + +.. autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + zvt + zvt.api + zvt.autocode + zvt.consts + zvt.common + zvt.contract + zvt.domain + zvt.factors + zvt.fill_project + zvt.informer + zvt.misc + zvt.ml + zvt.plugin + zvt.recorders + zvt.rest + zvt.samples + zvt.tag + zvt.trader + zvt.trading + zvt.ui + zvt.utils \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 00000000..d0960ad5 --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,70 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys + +sys.path.insert(0, os.path.abspath("../../src")) + + +# -- Project information ----------------------------------------------------- + +project = "zvt" +copyright = "2022, foolcage" +author = "foolcage" + +# The full version, including alpha/beta/rc tags +release = "0.13.0" + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "IPython.sphinxext.ipython_directive", + "IPython.sphinxext.ipython_console_highlighting", +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "alabaster" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +html_sidebars = { + "**": ["sidebarlogo.html", "globaltoc.html", "sourcelink.html", "searchbox.html", "localtoc.html", "relations.html"] +} + +autosummary_generate = True + +autodoc_default_options = { + "member-order": "bysource", +} diff --git a/docs/source/contributor.rst b/docs/source/contributor.rst new file mode 100644 index 00000000..af08c432 --- /dev/null +++ b/docs/source/contributor.rst @@ -0,0 +1,27 @@ +==================== +Contributor +==================== + +Thanks, my friends. 
+ +* 隋鹏飞 +* `Tiger `_ +* 蓝小球 +* 青玄 +* \*崇 +* \*阳 +* 张凯 +* Rubicon_陈小虾 +* `看门大爷 `_ +* 芝华 +* 雨多田光 +* 叶金荣 +* 冷冷 +* Landy +* Sunny +* Ccc国毅 +* Jim Tong +* tom +* 小七 +* 小强 +* Cascara Latte- 刘可烁 diff --git a/docs/source/core_concepts.rst b/docs/source/core_concepts.rst new file mode 100644 index 00000000..b3d50b6a --- /dev/null +++ b/docs/source/core_concepts.rst @@ -0,0 +1,69 @@ +==================== +Core concepts +==================== + +Entity + the existence described by itself, classification of existential concepts. + +EntityEvent + the event happened on the entity. + +TradableEntity + the Entity could be traded, e.g, Stock, Future. + +ActorEntity + the Entity acting in market, e.g, Fund, Individual, Government. + +IntervalLevel + repeated fixed time interval, e.g, 5m, 1d. + +Schema + the data structure with fields, one schema could have multiple storage with different Providers. + +Kdata(Quote) + the candlestick data with OHLC. + +Provider + the data provider. + +Storage + the sql database, e.g, sqlite, mysql. + +Recorder + class for recording data for Schema. + +Factor + data describing market.It's computed from from Schema, and save as new Schema if need. + +TargetSelector + the class select targets according to Factor. + +Trader + the backtest engine using TargetSelector, MLMachine or free style. + +Tagger + classify TradableEntity by different dimensions, could be used as ml category feature. + +MLMachine + the ml engine. + +TradingSignal + the signal contains information about how to trade. + +Drawer + the class for draw charts. + +Intent + what do you want to do, e.g. compare, distribute, composite. + +Normal data + the pandas dataframe with multiple index which level 0 is entity_id and level 1 is timestamp: + +=============== ========== ===== ===== ===== ===== +entity_id timestamp col1 col2 col3 col4 +=============== ========== ===== ===== ===== ===== +stock_sz_000338 2020-05-05 1.2 0.5 0.3 a +... 
2020-05-06 1.0 0.7 0.2 b +stock_sz_000778 2020-05-05 1.2 0.5 0.3 a +... 2020-05-06 1.0 0.7 0.2 b +=============== ========== ===== ===== ===== ===== diff --git a/docs/source/data/adding_new_entity.rst b/docs/source/data/adding_new_entity.rst new file mode 100644 index 00000000..1dfd1be7 --- /dev/null +++ b/docs/source/data/adding_new_entity.rst @@ -0,0 +1,157 @@ +.. _adding_new_entity: + +================= +Adding new entity +================= + +It's human nature to like the new and hate the old. Adding new TradableEntity is easy in zvt. + +Adding new entity is nothing than a specific case of :ref:`Adding data `. +Let's show the key steps below which add :class:`~.zvt.domain.meta.future_meta.Future`. + +Define entity Schema +-------------------------- + +:: + + # -*- coding: utf-8 -*- + from sqlalchemy.ext.declarative import declarative_base + + from zvt.contract.register import register_schema, register_entity + from zvt.contract.schema import TradableEntity + + FutureMetaBase = declarative_base() + + + @register_entity(entity_type="future") + class Future(FutureMetaBase, TradableEntity): + __tablename__ = "future" + + + register_schema(providers=["em"], db_name="future_meta", schema_base=FutureMetaBase) + +Implement recorder for the entity +--------------------------------- + +:: + + from zvt.contract.api import df_to_db + from zvt.contract.recorder import Recorder + from zvt.domain import Future + from zvt.recorders.em import em_api + + + class EMFutureRecorder(Recorder): + provider = "em" + data_schema = Future + + def run(self): + df = em_api.get_tradable_list(entity_type="future") + self.logger.info(df) + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +Define OHLC schema(kdata) for the entity +---------------------------------------- + +zvt provide a standard way to generate OHLC schema for the tradable entity. +All `OHLC schemas `_ is generated by +`fill project script `_. 
+ +e.g generate Future OHLC schema. + +:: + + gen_kdata_schema( + pkg="zvt", + providers=["em"], + entity_type="future", + levels=[IntervalLevel.LEVEL_1DAY], + entity_in_submodule=True, + ) + +The OHLC schema definition principle is: **one level one file** + +So we would define a common OHLC schema for each entity type in `quotes module `_. + +e.g. Future common OHLC schema + +:: + + class FutureKdataCommon(KdataCommon): + #: 持仓量 + interest = Column(Float) + #: 结算价 + settlement = Column(Float) + #: 涨跌幅(按收盘价) + # change_pct = Column(Float) + #: 涨跌幅(按结算价) + change_pct1 = Column(Float) + +And we could relate the common kdata schema with the recorder and route level to specific schema automatically. + +Implement recorder for OHLC schema(kdata) +----------------------------------------- + +Check `em quotes recorder `_ for +the details. + +:: + + class EMFutureKdataRecorder(BaseEMStockKdataRecorder): + entity_provider = "em" + entity_schema = Future + + data_schema = FutureKdataCommon + + +Use them in zvt way +------------------- + +Fetch the entity list: + +:: + + >>> from zvt.domain import Future + >>> Future.record_data() + >>> df = Future.query_data() + >>> print(df) + + id entity_id timestamp entity_type exchange code name list_date end_date + 0 future_cffex_IC future_cffex_IC NaT future cffex IC 中证当月连续 NaT None + 1 future_cffex_IF future_cffex_IF NaT future cffex IF 沪深当月连续 NaT None + 2 future_cffex_IH future_cffex_IH NaT future cffex IH 上证当月连续 NaT None + 3 future_cffex_T future_cffex_T NaT future cffex T 十债当季连续 NaT None + 4 future_cffex_TF future_cffex_TF NaT future cffex TF 五债当季连续 NaT None + .. ... ... ... ... ... ... ... ... ... 
+ 65 future_ine_LU future_ine_LU 2020-06-22 future ine LU 低硫燃油主力 2020-06-22 None + 66 future_czce_PF future_czce_PF 2020-10-12 future czce PF 短纤主力 2020-10-12 None + 67 future_ine_BC future_ine_BC 2020-11-19 future ine BC 国际铜主力 2020-11-19 None + 68 future_dce_LH future_dce_LH 2021-01-08 future dce LH 生猪主力 2021-01-08 None + 69 future_czce_PK future_czce_PK 2021-02-01 future czce PK 花生主力 2021-02-01 None + + [70 rows x 9 columns] + +Fetch the quotes: + +:: + + >>> from zvt.domain import Future1dKdata + >>> Future1dKdata.record_data(code="CU") + >>> df = Future1dKdata.query_data(code="CU") + >>> print(df) + + id entity_id timestamp provider code name level open close high low volume turnover change_pct turnover_rate interest settlement change_pct1 + 0 future_shfe_CU_1996-04-03 future_shfe_CU 1996-04-03 em CU 沪铜主力 1d 22930.0 22840.0 23000.0 22840.0 353.0 0.000000e+00 0.0000 0.0 None None None + 1 future_shfe_CU_1996-04-04 future_shfe_CU 1996-04-04 em CU 沪铜主力 1d 22700.0 22750.0 22820.0 22650.0 251.0 0.000000e+00 -0.0039 0.0 None None None + 2 future_shfe_CU_1996-04-05 future_shfe_CU 1996-04-05 em CU 沪铜主力 1d 22520.0 22780.0 22820.0 22500.0 298.0 0.000000e+00 0.0013 0.0 None None None + 3 future_shfe_CU_1996-04-08 future_shfe_CU 1996-04-08 em CU 沪铜主力 1d 22660.0 22650.0 22680.0 22600.0 98.0 0.000000e+00 -0.0057 0.0 None None None + 4 future_shfe_CU_1996-04-09 future_shfe_CU 1996-04-09 em CU 沪铜主力 1d 22830.0 22810.0 22860.0 22810.0 56.0 0.000000e+00 0.0071 0.0 None None None + ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... 
+ 6343 future_shfe_CU_2022-04-21 future_shfe_CU 2022-04-21 em CU 沪铜主力 1d 74140.0 74480.0 74750.0 74140.0 48008.0 1.787678e+10 -0.0004 0.0 None None None + 6344 future_shfe_CU_2022-04-22 future_shfe_CU 2022-04-22 em CU 沪铜主力 1d 74800.0 75010.0 75200.0 74690.0 58874.0 2.205633e+10 0.0073 0.0 None None None + 6345 future_shfe_CU_2022-04-25 future_shfe_CU 2022-04-25 em CU 沪铜主力 1d 74900.0 73660.0 75190.0 73660.0 107455.0 3.989090e+10 -0.0168 0.0 None None None + 6346 future_shfe_CU_2022-04-26 future_shfe_CU 2022-04-26 em CU 沪铜主力 1d 73170.0 73260.0 73750.0 72500.0 113019.0 4.130931e+10 -0.0132 0.0 None None None + 6347 future_shfe_CU_2022-04-27 future_shfe_CU 2022-04-27 em CU 沪铜主力 1d 72990.0 73100.0 73560.0 72910.0 61563.0 2.254089e+10 0.0000 0.0 None None None + + [6348 rows x 18 columns] diff --git a/docs/source/data/data_concepts.rst b/docs/source/data/data_concepts.rst new file mode 100644 index 00000000..e8c1c85f --- /dev/null +++ b/docs/source/data/data_concepts.rst @@ -0,0 +1,336 @@ +============= +Data concepts +============= + + +.. _data.tradable_entity: + +TradableEntity +------------------------------ +:class:`~.zvt.contract.schema.TradableEntity` is anything could be traded, it could be :class:`~.zvt.domain.meta.stock_meta.Stock`, +:class:`~.zvt.domain.meta.etf_meta.Etf`, :class:`~.zvt.domain.meta.index_meta.Index`, future, cryptocurrency, or even a **sports match**. + +Let's start with the real world tradable entities: China stock and USA stock ——— the world's most involved trading targets. 
+ +record and query stock: +:: + + >>> from zvt.domain import Stock + >>> Stock.record_data() + >>> df = Stock.query_data() + >>> print(df) + id entity_id timestamp entity_type exchange code name list_date end_date + 0 stock_sh_600651 stock_sh_600651 1986-09-26 stock sh 600651 *ST飞乐 1986-09-26 None + 1 stock_sz_000004 stock_sz_000004 1990-12-01 stock sz 000004 国华网安 1990-12-01 None + 2 stock_sz_000005 stock_sz_000005 1990-12-10 stock sz 000005 世纪星源 1990-12-10 None + 3 stock_sh_600601 stock_sh_600601 1990-12-19 stock sh 600601 方正科技 1990-12-19 None + 4 stock_sh_600602 stock_sh_600602 1990-12-19 stock sh 600602 云赛智联 1990-12-19 None + ... ... ... ... ... ... ... ... ... ... + 4615 stock_sh_603176 stock_sh_603176 2021-12-31 stock sh 603176 汇通集团 2021-12-31 None + 4616 stock_sh_600941 stock_sh_600941 2022-01-05 stock sh 600941 中国移动 2022-01-05 None + 4617 stock_sh_688262 stock_sh_688262 2022-01-06 stock sh 688262 国芯科技 2022-01-06 None + 4618 stock_sh_688176 stock_sh_688176 2022-01-07 stock sh 688176 亚虹医药 2022-01-07 None + 4619 stock_sz_301159 stock_sz_301159 2022-01-07 stock sz 301159 N三维 2022-01-07 None + + [4620 rows x 9 columns] + +record and query stockus: +:: + + >>> from zvt.domain import Stockus + >>> Stockus.record_data() + >>> df = Stockus.query_data() + >>> print(df) + id entity_id timestamp entity_type exchange code name list_date end_date + 0 stockus_nasdaq_CMII stockus_nasdaq_CMII None stockus nasdaq CMII CM Life Sciences II Inc-A None None + 1 stockus_nasdaq_CBLI stockus_nasdaq_CBLI None stockus nasdaq CBLI Cytocom Inc None None + 2 stockus_nyse_XPOw stockus_nyse_XPOw None stockus nyse XPOw XPO Logistics Inc WI None None + 3 stockus_nyse_WRI stockus_nyse_WRI None stockus nyse WRI 魏因加滕房地产投资 None None + 4 stockus_nyse_PROS stockus_nyse_PROS None stockus nyse PROS ProSight Global Inc None None + ... ... ... ... ... ... ... ... ... ... 
+ 6345 stockus_nyse_FATH stockus_nyse_FATH None stockus nyse FATH Fathom Digital Manufacturing Co None None + 6346 stockus_nyse_LOCL stockus_nyse_LOCL None stockus nyse LOCL Local Bounti Corp None None + 6347 stockus_nyse_VLD stockus_nyse_VLD None stockus nyse VLD Velo3D Inc None None + 6348 stockus_nyse_AHI stockus_nyse_AHI None stockus nyse AHI Advanced Human Imaging Ltd ADR None None + 6349 stockus_nyse_HLGN stockus_nyse_HLGN None stockus nyse HLGN Heliogen Inc None None + +| what about other tradable entities? +| Show current registered tradable entity type and its schema: + +:: + + >>> from zvt.contract import zvt_context + >>> zvt_context.tradable_schema_map + {'stockus': zvt.domain.meta.stockus_meta.Stockus, + 'stockhk': zvt.domain.meta.stockhk_meta.Stockhk, + 'index': zvt.domain.meta.index_meta.Index, + 'etf': zvt.domain.meta.etf_meta.Etf, + 'stock': zvt.domain.meta.stock_meta.Stock, + 'block': zvt.domain.meta.block_meta.Block, + 'fund': zvt.domain.meta.fund_meta.Fund} + +The key is **entity_type** and the value is its :ref:`Schema`. + +From intuition, stockhk should be stock of hongkong: +:: + + >>> from zvt.domain import Stockhk + >>> Stockhk.record_data() + >>> df = Stockhk.query_data(index='code') + >>> print(df) + + id entity_id timestamp entity_type exchange code name list_date end_date + code + 00001 stockhk_hk_00001 stockhk_hk_00001 NaT stockhk hk 00001 长和 None None + 00002 stockhk_hk_00002 stockhk_hk_00002 NaT stockhk hk 00002 中电控股 None None + 00003 stockhk_hk_00003 stockhk_hk_00003 NaT stockhk hk 00003 香港中华煤气 None None + 00004 stockhk_hk_00004 stockhk_hk_00004 NaT stockhk hk 00004 九龙仓集团 None None + 00005 stockhk_hk_00005 stockhk_hk_00005 NaT stockhk hk 00005 汇丰控股 None None + ... ... ... ... ... ... ... ... ... ... 
+ 09996 stockhk_hk_09996 stockhk_hk_09996 NaT stockhk hk 09996 沛嘉医疗-B None None + 09997 stockhk_hk_09997 stockhk_hk_09997 NaT stockhk hk 09997 康基医疗 None None + 09998 stockhk_hk_09998 stockhk_hk_09998 NaT stockhk hk 09998 光荣控股 None None + 09999 stockhk_hk_09999 stockhk_hk_09999 NaT stockhk hk 09999 网易-S None None + 80737 stockhk_hk_80737 stockhk_hk_80737 NaT stockhk hk 80737 湾区发展-R None None + + [2597 rows x 9 columns] + + >>> df[df.code=='00700'] + + id entity_id timestamp entity_type exchange code name list_date end_date + 2112 stockhk_hk_00700 stockhk_hk_00700 None stockhk hk 00700 腾讯控股 None None + +From intuition, other tradable entities could be added to the system and used in the same way. +Just follow :ref:`Add tradable entity ` + +.. _data.actor_entity: + +ActorEntity +------------------------------ +:class:`~.zvt.contract.schema.ActorEntity` is the beings acting in the market, it could be government, +company, fund or individual. + +:: + + >>> from zvt.domain import StockInstitutionalInvestorHolder + >>> entity_ids = ["stock_sz_000338", "stock_sz_000001"] + >>> StockInstitutionalInvestorHolder.record_data(entity_ids=entity_ids) + >>> df = StockInstitutionalInvestorHolder.query_data(entity_ids=entity_ids) + >>> print(df) + id entity_id timestamp code name actor_id actor_type actor_code actor_name report_period report_date holding_numbers holding_ratio holding_values + 0 stock_sz_000001_1998-06-30_raised_fund_cn_184688 stock_sz_000001 1998-06-30 000001 平安银行 raised_fund_cn_184688 raised_fund 184688 基金开元 half_year 1998-06-30 1.896697e+06 0.001771 3.269906e+07 + 1 stock_sz_000001_1998-09-30_raised_fund_cn_184688 stock_sz_000001 1998-09-30 000001 平安银行 raised_fund_cn_184688 raised_fund 184688 基金开元 season3 1998-09-30 2.634093e+06 0.002460 4.151331e+07 + 2 stock_sz_000001_1998-12-31_raised_fund_cn_184688 stock_sz_000001 1998-12-31 000001 平安银行 raised_fund_cn_184688 raised_fund 184688 基金开元 year 1998-12-31 2.673900e+06 0.002497 3.992133e+07 + 3 
stock_sz_000001_1999-03-31_raised_fund_cn_184688 stock_sz_000001 1999-03-31 000001 平安银行 raised_fund_cn_184688 raised_fund 184688 基金开元 season1 1999-03-31 2.378977e+06 0.002221 3.256820e+07 + 4 stock_sz_000001_1999-06-30_raised_fund_cn_500005 stock_sz_000001 1999-06-30 000001 平安银行 raised_fund_cn_500005 raised_fund 500005 基金汉盛 half_year 1999-06-30 4.989611e+06 0.004659 1.386613e+08 + ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... + 22463 stock_sz_000338_2021-09-30_broker_cn_71067063 stock_sz_000338 2021-09-30 000338 潍柴动力 broker_cn_71067063 broker 71067063 东方红信和添安4号 season3 2021-09-30 5.000000e+04 0.000012 8.580000e+05 + 22464 stock_sz_000338_2021-09-30_corporation_cn_1003... stock_sz_000338 2021-09-30 000338 潍柴动力 corporation_cn_10030838 corporation 10030838 潍柴控股集团有限公司 season3 2021-09-30 1.422551e+09 0.018071 2.441097e+10 + 22465 stock_sz_000338_2021-09-30_corporation_cn_1067... stock_sz_000338 2021-09-30 000338 潍柴动力 corporation_cn_10671586 corporation 10671586 香港中央结算有限公司 season3 2021-09-30 4.992710e+08 0.117713 8.567490e+09 + 22466 stock_sz_000338_2021-09-30_corporation_cn_1019... stock_sz_000338 2021-09-30 000338 潍柴动力 corporation_cn_10196008 corporation 10196008 中国证券金融股份有限公司 season3 2021-09-30 1.636089e+08 0.038574 2.807529e+09 + 22467 stock_sz_000338_2021-09-30_corporation_cn_1008... stock_sz_000338 2021-09-30 000338 潍柴动力 corporation_cn_10086358 corporation 10086358 奥地利IVM技术咨询维也纳有限公司 season3 2021-09-30 1.139387e+08 0.026863 1.955188e+09 + + [22468 rows x 14 columns] + +.. note:: + + A good actor should know the good or bad actors in history, more importantly, + the mind behind them. + +.. _data.schema: + +Schema +------------------------------ +Data structure describing :class:`~.zvt.contract.schema.TradableEntity`, :class:`~.zvt.contract.schema.ActorEntity` or events happen on them. +Physically it's table with columns in sql database. One schema could have multiple storage +with different providers. + +.. 
_data.schema_usage: + +From specific to general, all zvt schema usage is in the same way. + +* from zvt.domain import {Schema} +* {Schema}.record_data +* {Schema}.query_data + +Explore :py:mod:`~.zvt.domain` for pre defined schemas. And check :ref:`record_data & query_data details ` + +:: + + >>> from zvt.domain import * + >>> entity_ids = ["stock_sz_000338", "stock_sz_000001"] + >>> Stock1dHfqKdata.record_data(entity_ids=entity_ids, provider="em") + >>> df = Stock1dHfqKdata.query_data(entity_ids=entity_ids, provider="em") + >>> print(df) + + id entity_id timestamp provider code name level open close high low volume turnover change_pct turnover_rate + 0 stock_sz_000001_1991-04-03 stock_sz_000001 1991-04-03 em 000001 平安银行 1d 49.00 49.00 49.00 49.00 1.0 5.000000e+03 0.2250 0.0000 + 1 stock_sz_000001_1991-04-04 stock_sz_000001 1991-04-04 em 000001 平安银行 1d 48.76 48.76 48.76 48.76 3.0 1.500000e+04 -0.0049 0.0000 + 2 stock_sz_000001_1991-04-05 stock_sz_000001 1991-04-05 em 000001 平安银行 1d 48.52 48.52 48.52 48.52 2.0 1.000000e+04 -0.0049 0.0000 + 3 stock_sz_000001_1991-04-06 stock_sz_000001 1991-04-06 em 000001 平安银行 1d 48.28 48.28 48.28 48.28 7.0 3.400000e+04 -0.0049 0.0000 + 4 stock_sz_000001_1991-04-08 stock_sz_000001 1991-04-08 em 000001 平安银行 1d 48.04 48.04 48.04 48.04 2.0 1.000000e+04 -0.0050 0.0000 + ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... 
+ 10859 stock_sz_000338_2022-01-10 stock_sz_000338 2022-01-10 em 000338 潍柴动力 1d 314.38 314.38 320.37 312.69 956271.0 1.735153e+09 0.0149 0.0190 + 10860 stock_sz_000001_2022-01-11 stock_sz_000001 2022-01-11 em 000001 平安银行 1d 2974.07 2998.45 3019.58 2954.57 1581999.0 2.752485e+09 0.0121 0.0082 + 10861 stock_sz_000338_2022-01-11 stock_sz_000338 2022-01-11 em 000338 潍柴动力 1d 312.69 307.01 314.23 306.70 812187.0 1.444640e+09 -0.0234 0.0161 + 10862 stock_sz_000001_2022-01-12 stock_sz_000001 2022-01-12 em 000001 平安银行 1d 2998.45 2931.82 3004.95 2915.56 1502164.0 2.561266e+09 -0.0222 0.0077 + 10863 stock_sz_000338_2022-01-12 stock_sz_000338 2022-01-12 em 000338 潍柴动力 1d 307.01 305.78 309.62 302.86 882165.0 1.542044e+09 -0.0040 0.0175 + + [10864 rows x 15 columns] + +The data of the schema is recorded in local database by default and could be updated incrementally. + +Find them in this way: + +:: + + {Schema}.get_storages() + +e.g. + +:: + + >>> Stock1dHfqKdata.get_storages() + [Engine(sqlite:////Users/foolcage/zvt-home/data/joinquant_stock_1d_hfq_kdata.db?check_same_thread=False), + Engine(sqlite:////Users/foolcage/zvt-home/data/em_stock_1d_hfq_kdata.db?check_same_thread=False)] + +IntervalLevel +------------------------------ +:class:`~.zvt.contract.IntervalLevel` is repeated fixed time interval, e.g, 5m, 1d. +It's used in OHLC data for describing time window. + +:: + + >>> from zvt.contract import * + >>> for level in IntervalLevel: + >>> print(level.value) + tick + 1m + 5m + 15m + 30m + 1h + 4h + 1d + 1wk + 1mon + +Kdata(Quote, OHLC) +------------------------------ +the candlestick data with OHLC. + +the :class:`~.zvt.contract.schema.TradableEntity` quote schema name follows below rules: + +:: + + {entity_shema}{level}{adjust_type}Kdata + +* entity_schema + +TradableEntity class,e.g. Stock,Stockus. + +* level + +IntervalLevel value, e.g. 1d,1wk. + +* adjust type + +pre adjusted(qfq), post adjusted(hfq), or not adjusted(bfq). 
+ +:: + + >>> for adjust_type in AdjustType: + >>> print(adjust_type.value) + +.. note:: + + In order to be compatible with historical data, the qfq is an exception, {adjust_type} is left empty + +The pre defined kdata schema could be found in :py:mod:`~.zvt.domain.quotes`, it's separated by +entity_schema, level, and adjust type. + +e.g. Stock1dHfqKdata means China Stock daily hfq quotes. + +:: + + >>> from zvt.domain import Stock1dHfqKdata + >>> Stock1dHfqKdata.record_data(code='000338', provider='em') + >>> df = Stock1dHfqKdata.query_data(code='000338', provider='em') + >>> print(df) + + id entity_id timestamp provider code name level open close high low volume turnover change_pct turnover_rate + 0 stock_sz_000338_2007-04-30 stock_sz_000338 2007-04-30 None 000338 潍柴动力 1d 70.00 64.93 71.00 62.88 207375.0 1.365189e+09 2.1720 0.1182 + 1 stock_sz_000338_2007-05-08 stock_sz_000338 2007-05-08 None 000338 潍柴动力 1d 66.60 64.00 68.00 62.88 86299.0 5.563198e+08 -0.0143 0.0492 + 2 stock_sz_000338_2007-05-09 stock_sz_000338 2007-05-09 None 000338 潍柴动力 1d 63.32 62.00 63.88 59.60 93823.0 5.782065e+08 -0.0313 0.0535 + 3 stock_sz_000338_2007-05-10 stock_sz_000338 2007-05-10 None 000338 潍柴动力 1d 61.50 62.49 64.48 61.01 47720.0 2.999226e+08 0.0079 0.0272 + 4 stock_sz_000338_2007-05-11 stock_sz_000338 2007-05-11 None 000338 潍柴动力 1d 61.90 60.65 61.90 59.70 39273.0 2.373126e+08 -0.0294 0.0224 + ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... 
+ 3426 stock_sz_000338_2021-08-27 stock_sz_000338 2021-08-27 None 000338 潍柴动力 1d 331.97 345.95 345.95 329.82 1688497.0 3.370241e+09 0.0540 0.0398 + 3427 stock_sz_000338_2021-08-30 stock_sz_000338 2021-08-30 None 000338 潍柴动力 1d 345.95 342.72 346.10 337.96 1187601.0 2.377957e+09 -0.0093 0.0280 + 3428 stock_sz_000338_2021-08-31 stock_sz_000338 2021-08-31 None 000338 潍柴动力 1d 344.41 342.41 351.02 336.73 1143985.0 2.295195e+09 -0.0009 0.0270 + 3429 stock_sz_000338_2021-09-01 stock_sz_000338 2021-09-01 None 000338 潍柴动力 1d 341.03 336.42 341.03 328.28 1218697.0 2.383841e+09 -0.0175 0.0287 + 3430 stock_sz_000338_2021-09-02 stock_sz_000338 2021-09-02 None 000338 潍柴动力 1d 336.88 339.03 340.88 329.67 1023545.0 2.012006e+09 0.0078 0.0241 + + [3431 rows x 15 columns] + + +e.g. Stock30mHfqKdata means China Stock 30 minutes hfq quotes. + +:: + + >>> from zvt.domain import Stock30mHfqKdata + >>> Stock30mHfqKdata.record_data(code='000338', provider='em') + >>> df = Stock30mHfqKdata.query_data(code='000338', provider='em') + >>> print(df) + + id entity_id timestamp provider code name level open close high low volume turnover change_pct turnover_rate + 0 stock_sz_000338_2022-01-07T10:00:00.000 stock_sz_000338 2022-01-07 10:00:00 em 000338 潍柴动力 30m 312.23 313.77 317.30 310.70 288036.0 521397671.0 0.0049 0.0057 + 1 stock_sz_000338_2022-01-07T10:30:00.000 stock_sz_000338 2022-01-07 10:30:00 em 000338 潍柴动力 30m 313.92 313.77 315.15 312.54 111887.0 201667653.0 0.0000 0.0022 + 2 stock_sz_000338_2022-01-07T11:00:00.000 stock_sz_000338 2022-01-07 11:00:00 em 000338 潍柴动力 30m 313.77 314.07 314.69 313.00 80072.0 144303962.0 0.0010 0.0016 + 3 stock_sz_000338_2022-01-07T11:30:00.000 stock_sz_000338 2022-01-07 11:30:00 em 000338 潍柴动力 30m 314.23 316.23 316.84 313.61 160797.0 291742498.0 0.0069 0.0032 + 4 stock_sz_000338_2022-01-07T13:30:00.000 stock_sz_000338 2022-01-07 13:30:00 em 000338 潍柴动力 30m 316.23 314.07 316.99 314.07 115775.0 210236422.0 -0.0068 0.0023 + .. ... ... ... ... ... ... ... ... ... 
... ... ... ... ... ... + 251 stock_sz_000338_2022-02-28T11:30:00.000 stock_sz_000338 2022-02-28 11:30:00 em 000338 潍柴动力 30m 268.15 268.30 268.61 267.99 34581.0 52053276.0 0.0006 0.0007 + 252 stock_sz_000338_2022-02-28T13:30:00.000 stock_sz_000338 2022-02-28 13:30:00 em 000338 潍柴动力 30m 268.46 268.46 268.61 268.15 38019.0 57268380.0 0.0006 0.0008 + 253 stock_sz_000338_2022-02-28T14:00:00.000 stock_sz_000338 2022-02-28 14:00:00 em 000338 潍柴动力 30m 268.46 269.22 269.53 268.30 41713.0 62994140.0 0.0028 0.0008 + 254 stock_sz_000338_2022-02-28T14:30:00.000 stock_sz_000338 2022-02-28 14:30:00 em 000338 潍柴动力 30m 269.22 269.22 269.53 268.61 40815.0 61676966.0 0.0000 0.0008 + 255 stock_sz_000338_2022-02-28T15:00:00.000 stock_sz_000338 2022-02-28 15:00:00 em 000338 潍柴动力 30m 269.07 269.84 269.84 268.76 60190.0 91032952.0 0.0023 0.0012 + + [256 rows x 15 columns] + +FinanceFactor +------------------------------ +The usage is same as other entity events. + +:: + + >>> from zvt.domain import FinanceFactor + >>> FinanceFactor.record_data(code='000338') + >>> FinanceFactor.query_data(code='000338',columns=FinanceFactor.important_cols(),index='timestamp') + + basic_eps total_op_income net_profit op_income_growth_yoy net_profit_growth_yoy roe rota gross_profit_margin net_margin timestamp + timestamp + 2002-12-31 NaN 1.962000e+07 2.471000e+06 NaN NaN NaN NaN 0.2068 0.1259 2002-12-31 + 2003-12-31 1.27 3.574000e+09 2.739000e+08 181.2022 109.8778 0.7729 0.1783 0.2551 0.0766 2003-12-31 + 2004-12-31 1.75 6.188000e+09 5.369000e+08 0.7313 0.9598 0.3245 0.1474 0.2489 0.0868 2004-12-31 + 2005-12-31 0.93 5.283000e+09 3.065000e+08 -0.1463 -0.4291 0.1327 0.0603 0.2252 0.0583 2005-12-31 + 2006-03-31 0.33 1.859000e+09 1.079000e+08 NaN NaN NaN NaN NaN 0.0598 2006-03-31 + ... ... ... ... ... ... ... ... ... ... ... 
+ 2020-08-28 0.59 9.449000e+10 4.680000e+09 0.0400 -0.1148 0.0983 0.0229 0.1958 0.0603 2020-08-28 + 2020-10-31 0.90 1.474000e+11 7.106000e+09 0.1632 0.0067 0.1502 0.0347 0.1949 0.0590 2020-10-31 + 2021-03-31 1.16 1.975000e+11 9.207000e+09 0.1327 0.0112 0.1919 0.0444 0.1931 0.0571 2021-03-31 + 2021-04-30 0.42 6.547000e+10 3.344000e+09 0.6788 0.6197 0.0622 0.0158 0.1916 0.0667 2021-04-30 + 2021-08-31 0.80 1.264000e+11 6.432000e+09 0.3375 0.3742 0.1125 0.0287 0.1884 0.0653 2021-08-31 + + [66 rows x 10 columns] + +Three financial tables + +:: + + >>> BalanceSheet.record_data(code='000338') + >>> IncomeStatement.record_data(code='000338') + >>> CashFlowStatement.record_data(code='000338') + +.. note:: + Just remember, all :ref:`schema usage ` is in the same way. diff --git a/docs/source/data/extending_data.rst b/docs/source/data/extending_data.rst new file mode 100644 index 00000000..33584466 --- /dev/null +++ b/docs/source/data/extending_data.rst @@ -0,0 +1,383 @@ +.. _extending_data: + +============== +Extend data +============== + + +Data structure +-------------------------- +zvt use sql database(default sqlite, and easy extend to others) to store data +and provide uniform api to record and query data.(Thanks to the great `Sqlalchemy `_) + +As we know, :ref:`TradableEntity` and :ref:`ActorEntity` is everything about the market, data is the events happen on them. +The Schema of the data would always in following structure: + +.. image:: ../_static/data_structure.png + +It shows how we define the Entity in the market: + +* entity_type + +tradable entity type, e.g. stock, future, bond and so on. 
+ +* exchange + +the exchange of the entity + +* code + +the code of the entity + +Naturally the id of the entity is defined: + +:: + + {entity_type}_{exchange}_{code} + +And the common fields: + +* entity_id + +entity id + +* timestamp + +the time when the event happened + +* id + +the id of the record; most of the time, it's: + +:: + +   {entity_id}_{timestamp} + +Data definition +-------------------------- + +zvt data is defined by three concepts: + +* Database + +The rule is: one module defines one db. + +* Schema + +The Schema defined in the module belongs to the Database defined inside. + +* Provider + +The Database could have different providers. + +Much pre-supported data is in `zvt.domain modules `_, +e.g. `Finance data `_; you could +refer to them when you want to extend data. + + +.. _extending_data.add_data: + +Key steps to add data +-------------------------- + +Let's show the key steps to add data. + +1. Intent +~~~~~~~~~~~~~~~~~~~~ +I want to add some data about the news on Stock. + +2. Find data source +~~~~~~~~~~~~~~~~~~~~ +The data source could be whatever you can see on the web ——— just write some +web crawlers to get it, e.g. most of the `pre-implemented recorders `_. +And you could use data from a commercial data provider in the same way, e.g. `joinquant recorders `_. + +Let's show the crawler way here: + +Open `eastmoney wap `_ and find the `news api `_ + +The news item result: + +:: + + { + "Art_ShowTime": "2022-02-09 14:12:30", + "Art_Image": "", + "Art_MediaName": "每日经济新闻", + "Art_Code": "202202092271136373", + "Art_Title": "首创证券维持索菲亚买入评级 公司简评报告:多重因素影响短期压制业绩 看好2022年利润修复", + "Art_SortStart": "1644387150036373", + "Art_VideoCount": 0, + "Art_OriginUrl": "http://finance.eastmoney.com/news/1354,202202092271136373.html", + "Art_Url": "http://finance.eastmoney.com/a/202202092271136373.html" + } + +3. Define Schema +~~~~~~~~~~~~~~~~~~~~ + +No matter what the format of the data outside zvt is, we use zvt's simple and +uniform contract to define it. 
+ +:: + + >>> from sqlalchemy import Column, String + >>> from sqlalchemy.orm import declarative_base + >>> from zvt.contract import Mixin + >>> from zvt.contract.register import register_schema + >>> NewsBase = declarative_base() + >>> class StockNews(NewsBase, Mixin): + >>> __tablename__ = "stock_news" + >>> #: news title + >>> news_title = Column(String) + >>> + >>> register_schema(providers=["em"], db_name="stock_news", schema_base=NewsBase, entity_type="stock") + +Check the defined db + +:: + + >>> StockNews.get_storages() + + [Engine(sqlite:////Users/foolcage/zvt-home/data/em_stock_news.db?check_same_thread=False)] + +As you see, the db file name format is: + +:: + + {provider}_{db_name} + +If you have another provider, e.g. sina, just register it too: + +:: + + >>> register_schema(providers=["em", "sina"], db_name="stock_news", schema_base=NewsBase, entity_type="stock") + +And you could find another db file: + +:: + + >>> StockNews.get_storages() + + [Engine(sqlite:////Users/foolcage/zvt-home/data/sina_stock_news.db?check_same_thread=False)] + +The advantage of this mechanism is: + +* schema is the way you want +* provider could be switched seamlessly + +4. Implement recorder +~~~~~~~~~~~~~~~~~~~~~ +Let's implement a recorder for StockNews. 
+ +:: + + import pandas as pd + from zvt.contract.api import df_to_db + from zvt.contract.recorder import FixedCycleDataRecorder + from zvt.domain import Stock + from zvt.domain.misc.stock_news import StockNews + from zvt.recorders.em import em_api + + + class EMStockNewsRecorder(FixedCycleDataRecorder): + original_page_url = "https://wap.eastmoney.com/quote/stock/0.002572.html" + url = "https://np-listapi.eastmoney.com/comm/wap/getListInfo?cb=callback&client=wap&type=1&mTypeAndCode=0.002572&pageSize=200&pageIndex={}&callback=jQuery1830017478247906740352_1644568731256&_=1644568879493" + + # the entity class you record for + entity_schema = Stock + # connect schema with recorder + data_schema = StockNews + # entity data provider + entity_provider = "em" + # data schema provider + provider = "em" + + def record(self, entity, start, end, size, timestamps): + news = em_api.get_news(entity_id=entity.id) + df = pd.DataFrame.from_records(news) + self.logger.info(df) + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +After that, you could use the data in zvt way: + +:: + + >>> StockNews.record_data(provider="em", entity_id="stock_sz_002572") + >>> df = StockNews.query_data(entity_id="stock_sz_002572") + >>> print(df) + + id entity_id timestamp news_title + 0 stock_sz_002572_2019-05-20 19:50:42 stock_sz_002572 2019-05-20 19:50:42 索菲亚(002572.SZ):股价回撤超65% 是低吸机会吗? + 1 stock_sz_002572_2019-05-24 22:39:47 stock_sz_002572 2019-05-24 22:39:47 索菲亚拟发行不超过5亿元超短期融资券 + 2 stock_sz_002572_2019-05-24 22:48:54 stock_sz_002572 2019-05-24 22:48:54 索菲亚(002572.SZ)拟终止发行不超10亿元的可转换公司债券 + 3 stock_sz_002572_2019-06-04 14:06:42 stock_sz_002572 2019-06-04 14:06:42 索菲亚(002572.SZ)截至5月底已累计回购2.02亿元的股份 + 4 stock_sz_002572_2019-06-13 06:24:44 stock_sz_002572 2019-06-13 06:24:44 索菲亚功臣王飚能否扶起汉森中国 + .. ... ... ... ... 
+ 367 stock_sz_002572_2022-02-07 14:04:40 stock_sz_002572 2022-02-07 14:04:40 开源证券维持索菲亚买入评级 近期获6份券商研报关注 目标均价涨幅59.82% + 368 stock_sz_002572_2022-02-07 15:32:46 stock_sz_002572 2022-02-07 15:32:46 【调研快报】索菲亚接待机构投资者调研 + 369 stock_sz_002572_2022-02-08 16:50:37 stock_sz_002572 2022-02-08 16:50:37 索菲亚:公司承接了容东片区安置房项目 + 370 stock_sz_002572_2022-02-08 21:59:00 stock_sz_002572 2022-02-08 21:59:00 9亿坏账拖累净利下降九成 家具巨头索菲亚“甩包袱”起跑 腰斩的股价能否趁势抬头? + 371 stock_sz_002572_2022-02-09 14:12:30 stock_sz_002572 2022-02-09 14:12:30 首创证券维持索菲亚买入评级 公司简评报告:多重因素影响短期压制业绩 看好2022年利润修复 + + [372 rows x 4 columns] + + +.. _extending_data.tradable_entity: + +Add new TradableEntity +-------------------------- +It's human nature to like the new and hate the old. Adding new TradableEntity is easy in zvt. + +And from a higher perspective, trading is everywhere. you make trading everytime when you make the +decision. + +So you could treat Country as TradableEntity and make trading when making decision where to live or invest. + +Let's show the key steps to add new TradableEntity. + +1. 
Define entity Schema +~~~~~~~~~~~~~~~~~~~~~~~ + +:: + + # -*- coding: utf-8 -*- + + from sqlalchemy import Column, String, Float + from sqlalchemy.orm import declarative_base + + from zvt.contract.schema import TradableEntity + from zvt.contract.register import register_schema, register_entity + + CountryMetaBase = declarative_base() + + + @register_entity(entity_type="country") + class Country(CountryMetaBase, TradableEntity): + __tablename__ = "country" + + #: 区域 + #: region + region = Column(String(length=128)) + #: 首都 + #: capital city + capital_city = Column(String(length=128)) + #: 收入水平 + #: income level + income_level = Column(String(length=64)) + #: 贷款类型 + #: lending type + lending_type = Column(String(length=64)) + #: 经度 + #: longitude + longitude = Column(Float) + #: 纬度 + #: latitude + latitude = Column(Float) + + + register_schema(providers=["wb"], db_name="country_meta", schema_base=CountryMetaBase) + +entity_type, exchange and code define the entity, for country, it's in following way: + +:: + + entity_type = "country" + exchange = "galaxy" + code = "iso code" + +e.g. country_galaxy_CN = China, country_galaxy_US = United States of America + + +2. Implement recorder for the entity +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +:: + + from zvt.contract.api import df_to_db + from zvt.contract.recorder import Recorder + from zvt.domain.meta.country_meta import Country + from zvt.recorders.wb import wb_api + + + class WBCountryRecorder(Recorder): + provider = "wb" + data_schema = Country + + def run(self): + df = wb_api.get_countries() + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + +3. 
Define schema for the entity +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +e.g treasury yield of the country +:: + + # -*- coding: utf-8 -*- + from sqlalchemy import Column, String, Float + from sqlalchemy.orm import declarative_base + + from zvt.contract import Mixin + from zvt.contract.register import register_schema + + CurrencyBase = declarative_base() + + + class TreasuryYield(CurrencyBase, Mixin): + __tablename__ = "treasury_yield" + + code = Column(String(length=32)) + + # 2年期 + yield_2 = Column(Float) + # 5年期 + yield_5 = Column(Float) + # 10年期 + yield_10 = Column(Float) + # 30年期 + yield_30 = Column(Float) + + + register_schema(providers=["em"], db_name="currency", schema_base=CurrencyBase) + +And the `recorder `_ for the schema + +4. Use them in zvt way +~~~~~~~~~~~~~~~~~~~~~~ + +Find the rich country: + +:: + + >>> from zvt.domain import Country + >>> Country.record_data() + >>> df = Country.query_data() + >>> df[df['income_level']=='High income'] + + id entity_id timestamp entity_type exchange code name list_date end_date region capital_city income_level lending_type longitude latitude + 0 country_galaxy_AW country_galaxy_AW None country galaxy AW Aruba None None Latin America & Caribbean Oranjestad High income Not classified -70.016700 12.516700 + 7 country_galaxy_AD country_galaxy_AD None country galaxy AD Andorra None None Europe & Central Asia Andorra la Vella High income Not classified 1.521800 42.507500 + 9 country_galaxy_AE country_galaxy_AE None country galaxy AE United Arab Emirates None None Middle East & North Africa Abu Dhabi High income Not classified 54.370500 24.476400 + 13 country_galaxy_AG country_galaxy_AG None country galaxy AG Antigua and Barbuda None None Latin America & Caribbean Saint John's High income IBRD -61.845600 17.117500 + 14 country_galaxy_AU country_galaxy_AU None country galaxy AU Australia None None East Asia & Pacific Canberra High income Not classified 149.129000 -35.282000 + .. ... ... ... ... ... ... ... ... ... ... ... ... 
... ... ... + 277 country_galaxy_TW country_galaxy_TW None country galaxy TW Taiwan, China None None East Asia & Pacific High income Not classified NaN NaN + 282 country_galaxy_UY country_galaxy_UY None country galaxy UY Uruguay None None Latin America & Caribbean Montevideo High income IBRD -56.067500 -34.894100 + 283 country_galaxy_US country_galaxy_US None country galaxy US United States None None North America Washington D.C. High income Not classified -77.032000 38.889500 + 287 country_galaxy_VG country_galaxy_VG None country galaxy VG British Virgin Islands None None Latin America & Caribbean Road Town High income Not classified -64.623056 18.431389 + 288 country_galaxy_VI country_galaxy_VI None country galaxy VI Virgin Islands (U.S.) None None Latin America & Caribbean Charlotte Amalie High income Not classified -64.896300 18.335800 + + [80 rows x 15 columns] + + +Compare treasury yields of different maturities: + +:: + + >>> from zvt.domain import TreasuryYield + >>> from zvt.api.intent import compare + >>> TreasuryYield.record_data() + >>> compare(codes=["US"], schema=TreasuryYield, columns=["yield_2", "yield_10", "yield_30"]) + +.. image:: ../_static/compare_yields.png \ No newline at end of file diff --git a/docs/source/data/index.rst b/docs/source/data/index.rst new file mode 100644 index 00000000..5ed8a7f7 --- /dev/null +++ b/docs/source/data/index.rst @@ -0,0 +1,25 @@ +========== +Data +========== + +| Without data, quant becomes a castle in the air. +| So, what exactly is data? +| zvt makes a concise and unified abstraction of data: + + In the world of zvt, there are two kinds of entities, one is :ref:`tradable entity `, + the other is :ref:`actor entity `. Data is the events happened on them. + +.. image:: ../_static/view.png + +.. note:: + + Philosophically, entity is the existence described by itself, classification of existential concepts. + +.. 
toctree:: + :maxdepth: 2 + + data_concepts + record_and_query + extending_data + adding_new_entity + trading_anything diff --git a/docs/source/data/record_and_query.rst b/docs/source/data/record_and_query.rst new file mode 100644 index 00000000..0453f98b --- /dev/null +++ b/docs/source/data/record_and_query.rst @@ -0,0 +1,17 @@ +.. _record_and_query: + +============== +Record & query +============== + + +Dualism +-------------------------- +In and out, write and read, and so record and query. + + +Record data +-------------------------- + +Query data +-------------------------- diff --git a/docs/source/data/trading_anything.rst b/docs/source/data/trading_anything.rst new file mode 100644 index 00000000..261d60bc --- /dev/null +++ b/docs/source/data/trading_anything.rst @@ -0,0 +1,162 @@ +.. _trading_anything: + +======================= +Capital without country +======================= + +From a higher perspective, trading is everywhere. You make a trade every time you make a +decision. + +So you could treat Country as a TradableEntity and make a trade when deciding where to live or invest. + +It's nothing more than a specific case of :ref:`Adding new entity `. + +Let's show the key steps below. 
+ +Define entity Schema +-------------------------- + +:: + + # -*- coding: utf-8 -*- + + from sqlalchemy import Column, String, Float + from sqlalchemy.orm import declarative_base + + from zvt.contract.schema import TradableEntity + from zvt.contract.register import register_schema, register_entity + + CountryMetaBase = declarative_base() + + + @register_entity(entity_type="country") + class Country(CountryMetaBase, TradableEntity): + __tablename__ = "country" + + #: 区域 + #: region + region = Column(String(length=128)) + #: 首都 + #: capital city + capital_city = Column(String(length=128)) + #: 收入水平 + #: income level + income_level = Column(String(length=64)) + #: 贷款类型 + #: lending type + lending_type = Column(String(length=64)) + #: 经度 + #: longitude + longitude = Column(Float) + #: 纬度 + #: latitude + latitude = Column(Float) + + + register_schema(providers=["wb"], db_name="country_meta", schema_base=CountryMetaBase) + +entity_type, exchange and code define the entity, for country, it's in following way: + +:: + + entity_type = "country" + exchange = "galaxy" + code = "iso code" + +e.g. 
country_galaxy_CN = China, country_galaxy_US = United States of America + + +Implement recorder for the entity +--------------------------------- + +:: + + from zvt.contract.api import df_to_db + from zvt.contract.recorder import Recorder + from zvt.domain.meta.country_meta import Country + from zvt.recorders.wb import wb_api + + + class WBCountryRecorder(Recorder): + provider = "wb" + data_schema = Country + + def run(self): + df = wb_api.get_countries() + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + +Define schema for the entity +---------------------------- + +e.g treasury yield of the country +:: + + # -*- coding: utf-8 -*- + from sqlalchemy import Column, String, Float + from sqlalchemy.orm import declarative_base + + from zvt.contract import Mixin + from zvt.contract.register import register_schema + + MonetaryBase = declarative_base() + + + class TreasuryYield(MonetaryBase, Mixin): + __tablename__ = "treasury_yield" + + code = Column(String(length=32)) + + # 2年期 + yield_2 = Column(Float) + # 5年期 + yield_5 = Column(Float) + # 10年期 + yield_10 = Column(Float) + # 30年期 + yield_30 = Column(Float) + + + register_schema(providers=["em"], db_name="monetary", schema_base=MonetaryBase) + # the __all__ is generated + __all__ = ["TreasuryYield"] + +And the `recorder `_ for the schema + +Use them in zvt way +------------------- + +Find the rich country: + +:: + + >>> from zvt.domain import Country + >>> Country.record_data() + >>> df = Country.query_data() + >>> df[df['income_level']=='High income'] + + id entity_id timestamp entity_type exchange code name list_date end_date region capital_city income_level lending_type longitude latitude + 0 country_galaxy_AW country_galaxy_AW None country galaxy AW Aruba None None Latin America & Caribbean Oranjestad High income Not classified -70.016700 12.516700 + 7 country_galaxy_AD country_galaxy_AD None country galaxy AD Andorra None None Europe & Central Asia Andorra la 
Vella High income Not classified 1.521800 42.507500 + 9 country_galaxy_AE country_galaxy_AE None country galaxy AE United Arab Emirates None None Middle East & North Africa Abu Dhabi High income Not classified 54.370500 24.476400 + 13 country_galaxy_AG country_galaxy_AG None country galaxy AG Antigua and Barbuda None None Latin America & Caribbean Saint John's High income IBRD -61.845600 17.117500 + 14 country_galaxy_AU country_galaxy_AU None country galaxy AU Australia None None East Asia & Pacific Canberra High income Not classified 149.129000 -35.282000 + .. ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... + 277 country_galaxy_TW country_galaxy_TW None country galaxy TW Taiwan, China None None East Asia & Pacific High income Not classified NaN NaN + 282 country_galaxy_UY country_galaxy_UY None country galaxy UY Uruguay None None Latin America & Caribbean Montevideo High income IBRD -56.067500 -34.894100 + 283 country_galaxy_US country_galaxy_US None country galaxy US United States None None North America Washington D.C. High income Not classified -77.032000 38.889500 + 287 country_galaxy_VG country_galaxy_VG None country galaxy VG British Virgin Islands None None Latin America & Caribbean Road Town High income Not classified -64.623056 18.431389 + 288 country_galaxy_VI country_galaxy_VI None country galaxy VI Virgin Islands (U.S.) None None Latin America & Caribbean Charlotte Amalie High income Not classified -64.896300 18.335800 + + [80 rows x 15 columns] + + +Compare treasury yields of different maturities: + +:: + + >>> from zvt.domain import TreasuryYield + >>> from zvt.api.intent import compare + >>> TreasuryYield.record_data() + >>> compare(codes=["US"], schema=TreasuryYield, columns=["yield_2", "yield_10", "yield_30"]) + +.. 
image:: ../_static/compare_yields.png \ No newline at end of file diff --git a/docs/source/drawer/drawer_concepts.rst b/docs/source/drawer/drawer_concepts.rst new file mode 100644 index 00000000..3b4581d6 --- /dev/null +++ b/docs/source/drawer/drawer_concepts.rst @@ -0,0 +1,39 @@ +=============== +Drawer concepts +=============== + + +Intent +------------------------------ +There are many kinds of charts, but the intent is much less. zvt classify +intent as compare, distribute and composite. + +We could analyze the entity itself or look at it in the whole market by +comparing with others. :ref:`NormalData` uses +different structure to express different intents. + +Compare +------------------------------ +Based on zvt's world view and data model, you could compare anything you care about. + +Think about that you want to compare S&P 500 index with Shanghai index. It's easy +and comfortable with no pain. + +:: + + >>> from zvt.api.intent import compare + >>> from zvt.domain import Indexus1dKdata, Index, Indexus, Index1dKdata + >>> Index.record_data(provider="em") + >>> Indexus.record_data(provider="em") + >>> Index1dKdata.record_data(entity_id="index_sh_000001", provider="em") + >>> Indexus1dKdata.record_data(entity_id="indexus_us_SPX", provider="em") + >>> compare(entity_ids=["index_sh_000001", "indexus_us_SPX"], start_timestamp="2000-01-01", scale_value=100) + + +.. image:: ../_static/compare_cn_us.png + +Try some other style: + +:: + + >>> compare(entity_ids=["index_sh_000001", "indexus_us_SPX"], start_timestamp="2000-01-01", schema_map_columns={Index1dKdata:["close"],Indexus1dKdata:["close"]}) diff --git a/docs/source/drawer/index.rst b/docs/source/drawer/index.rst new file mode 100644 index 00000000..e81a1026 --- /dev/null +++ b/docs/source/drawer/index.rst @@ -0,0 +1,16 @@ +========== +Drawer +========== + +| A picture is worth a thousand words. +| So, what exactly is behind the picture? 
+| it's drawer in zvt: + + Drawer is who can draw the picture with NormalData by Intent. + +.. toctree:: + :maxdepth: 2 + + drawer_concepts + + diff --git a/docs/source/factor/extending_factor.rst b/docs/source/factor/extending_factor.rst new file mode 100644 index 00000000..c6c79178 --- /dev/null +++ b/docs/source/factor/extending_factor.rst @@ -0,0 +1,204 @@ +.. _factor.extending_factor: + +================ +Extending Factor +================ + +Rethink NormalData +-------------------------- +Normal data format is as below: + +.. image:: ../_static/normal_data.png + +| Why use this format? +| The reason is that human is comfortable for two-dimensional space and + high-dimensional space could be reduced to it. + + +Obviously, It's complete and consistent. You could **calculate oneself in time intervals** +or **calculate with others in specific time or intervals**. And it's easy to analyze the +data with charts by Intent. + +Factor data structure +-------------------------- +The factor computing flow is as below: + +.. image:: ../_static/factor_flow.png + +* data_df + +NormalData df read from the schema. + +* factor_df + +NormalData df computed by data_df using use :class:`~.zvt.contract.factor.Transformer`, :class:`~.zvt.contract.factor.Accumulator` +or your custom logic. + +* result_df + +NormalData df containing columns **filter_result** and(or) **score_result** +which calculated using factor_df or(and) data_df. +Filter_result is True or False, score_result is from 0 to 1. +You could use TargetSelector to select targets at specific time when +filter_result is True and(or) score_result >=0.8 by default or do more +precise control by setting other arguments. 
+ +Let's take BullFactor to illustrate the calculation process: +:: + + >>> from zvt.factors.macd import BullFactor + >>> from zvt.domain import Stock1dHfqKdata + >>> entity_ids = ["stock_sh_601318", "stock_sz_000338"] + >>> Stock1dHfqKdata.record_data(entity_ids=entity_ids, provider="em") + >>> factor = BullFactor(entity_ids=entity_ids, provider="em", start_timestamp='2019-01-01', end_timestamp='2019-06-10') + +check the dfs: +:: + + >>> factor.data_df + >>> factor.factor_df + >>> factor.result_df + +.. _factor.write_transformer: + +Write transformer +-------------------------- +Transformer works as bellow: + +.. image:: ../_static/transformer.png + +What's in your mind is the NormalData format, and then practice the skills to +manipulate it. + +You could use other ta lib with it easily, e.g Bollinger Bands using `TA lib `_ + +install ta at first: +:: + + pip install --upgrade ta + +write boll transformer and factor: +:: + + from typing import Optional, List + + import pandas as pd + from ta.volatility import BollingerBands + + from zvt.contract.factor import * + from zvt.factors import TechnicalFactor + + + class BollTransformer(Transformer): + def transform_one(self, entity_id, df: pd.DataFrame) -> pd.DataFrame: + indicator_bb = BollingerBands(close=df["close"], window=20, window_dev=2) + + # Add Bollinger Bands features + df["bb_bbm"] = indicator_bb.bollinger_mavg() + df["bb_bbh"] = indicator_bb.bollinger_hband() + df["bb_bbl"] = indicator_bb.bollinger_lband() + + # Add Bollinger Band high indicator + df["bb_bbhi"] = indicator_bb.bollinger_hband_indicator() + + # Add Bollinger Band low indicator + df["bb_bbli"] = indicator_bb.bollinger_lband_indicator() + + # Add Width Size Bollinger Bands + df["bb_bbw"] = indicator_bb.bollinger_wband() + + # Add Percentage Bollinger Bands + df["bb_bbp"] = indicator_bb.bollinger_pband() + return df + + + class BollFactor(TechnicalFactor): + transformer = BollTransformer() + + def drawer_factor_df_list(self) -> 
Optional[List[pd.DataFrame]]: + # set the factor to show + return [self.factor_df[["bb_bbm", "bb_bbh", "bb_bbl"]]] + + def compute_result(self): + # set filter_result, which bb_bbli=1.0 buy and bb_bbhi=1.0 sell + super().compute_result() + self.result_df = (self.factor_df["bb_bbli"] - self.factor_df["bb_bbhi"]).to_frame(name="filter_result") + self.result_df[self.result_df == 0] = None + self.result_df[self.result_df == 1] = True + self.result_df[self.result_df == -1] = False + +Let's show it: +:: + + >>> from zvt.domain import Stock1dHfqKdata + + >>> provider = "em" + >>> entity_ids = ["stock_sz_000338", "stock_sh_601318"] + >>> Stock1dHfqKdata.record_data(entity_ids=entity_ids, provider=provider,) + >>> factor = BollFactor( + >>> entity_ids=entity_ids, provider=provider, entity_provider=provider, start_timestamp="2019-01-01" + >>> ) + >>> factor.draw(show=True) + +.. image:: ../_static/boll_factor.png + +Most of ta lib support calculate single target by default, so we implement +transform_one of the Transformer. If you want to calculate many targets at +the same time you could implement transform directly and it would be faster. + +And Transformer is stateless, so it's easy to reuse in different factor if need. + +Factor with IntervalLevel +-------------------------- +After you write the transformer and construct the factor, it's easy to use in +different levels. + +Let's use BollFactor in IntervalLevel 30m: +:: + + >>> from zvt.domain import Stock30mHfqKdata + + >>> provider = "em" + >>> entity_ids = ["stock_sz_000338", "stock_sh_601318"] + + >>> Stock30mHfqKdata.record_data(entity_ids=entity_ids, provider=provider) + >>> factor = BollFactor( + entity_ids=entity_ids, provider=provider, entity_provider=provider, start_timestamp="2021-01-01" + ) + >>> factor.draw(show=True) + +Stream computing +-------------------------- +The data is coming continuously and the factor using the data need computing +continuously too. 
+ +It's simple and straightforward: + +* {Schema}.record_data in one process +* {Factor}.move_on which call {Schema}.query_data in another process + +.. image:: ../_static/stream.png + +We keep the simple enough philosophy: single process and thread. Enjoy +programming and make everything clear. + +Factor persistence +-------------------------- +Getting data and computing factor continuously is cool. +But...If It took a long time to calculate the factor and crashed.How would you feel? + +.. image:: ../_static/bear.gif + :align: center + +Select the targets +-------------------------- +You could select the targets according result_df of the factor by yourself. +Or use TargetSelector do it for you. + +.. image:: ../_static/factor_result.png + + +Write accumulator +-------------------------- + +.. image:: ../_static/accumulator.png diff --git a/docs/source/factor/factor_concepts.rst b/docs/source/factor/factor_concepts.rst new file mode 100644 index 00000000..2ab10099 --- /dev/null +++ b/docs/source/factor/factor_concepts.rst @@ -0,0 +1,168 @@ +.. _factor.factor_concepts: + +=============== +Factor concepts +=============== + +.. _factor.normal_data: + +Normal data +------------------------------ +:class:`~.zvt.contract.normal_data.NormalData` is the data containing pandas dataframe +with multiple index which level 0 named entity_id and level 1 named timestamp: + +=============== ========== ===== ===== ===== ===== +entity_id timestamp col1 col2 col3 col4 +=============== ========== ===== ===== ===== ===== +stock_sz_000338 2020-05-05 1.2 0.5 0.3 a +... 2020-05-06 1.0 0.7 0.2 b +stock_sz_000778 2020-05-05 1.2 0.5 0.3 a +... 2020-05-06 1.0 0.7 0.2 b +=============== ========== ===== ===== ===== ===== + +This data structure is used heavily in zvt computing, you should be familiar with it. +`Pandas multiple index guide `_ is +a good start. + +Query data returning normal data in this way: + +:: + + {Schema}.query_data(index=["entity_id, timestamp"]) + +e.g. 
+ +:: + + >>> from zvt.domain import * + >>> entity_ids = ["stock_sz_000338", "stock_sz_000001"] + >>> Stock1dHfqKdata.record_data(entity_ids=entity_ids, provider="em") + >>> df = Stock1dHfqKdata.query_data(entity_ids=entity_ids, provider="em", index=["entity_id", "timestamp"]) + >>> print(df) + + id entity_id timestamp provider code name level open close high low volume turnover change_pct turnover_rate + entity_id timestamp + stock_sz_000001 1991-04-03 stock_sz_000001_1991-04-03 stock_sz_000001 1991-04-03 em 000001 平安银行 1d 49.00 49.00 49.00 49.00 1.0 5.000000e+03 0.2250 0.0000 + 1991-04-04 stock_sz_000001_1991-04-04 stock_sz_000001 1991-04-04 em 000001 平安银行 1d 48.76 48.76 48.76 48.76 3.0 1.500000e+04 -0.0049 0.0000 + 1991-04-05 stock_sz_000001_1991-04-05 stock_sz_000001 1991-04-05 em 000001 平安银行 1d 48.52 48.52 48.52 48.52 2.0 1.000000e+04 -0.0049 0.0000 + 1991-04-06 stock_sz_000001_1991-04-06 stock_sz_000001 1991-04-06 em 000001 平安银行 1d 48.28 48.28 48.28 48.28 7.0 3.400000e+04 -0.0049 0.0000 + 1991-04-08 stock_sz_000001_1991-04-08 stock_sz_000001 1991-04-08 em 000001 平安银行 1d 48.04 48.04 48.04 48.04 2.0 1.000000e+04 -0.0050 0.0000 + ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... 
+ stock_sz_000338 2022-01-17 stock_sz_000338_2022-01-17 stock_sz_000338 2022-01-17 em 000338 潍柴动力 1d 296.26 297.64 298.71 293.49 504866.0 8.546921e+08 0.0026 0.0100 + 2022-01-18 stock_sz_000338_2022-01-18 stock_sz_000338 2022-01-18 em 000338 潍柴动力 1d 298.10 300.87 302.71 296.10 622455.0 1.064735e+09 0.0109 0.0124 + 2022-01-19 stock_sz_000338_2022-01-19 stock_sz_000338 2022-01-19 em 000338 潍柴动力 1d 299.64 299.48 304.24 298.56 610096.0 1.049195e+09 -0.0046 0.0121 + 2022-01-20 stock_sz_000338_2022-01-20 stock_sz_000338 2022-01-20 em 000338 潍柴动力 1d 298.10 294.87 299.18 290.11 812949.0 1.361764e+09 -0.0154 0.0161 + 2022-01-21 stock_sz_000338_2022-01-21 stock_sz_000338 2022-01-21 em 000338 潍柴动力 1d 292.72 287.04 293.34 284.58 754156.0 1.234360e+09 -0.0266 0.0150 + + [10878 rows x 15 columns] + + +.. _factor.factor: + +Factor +------------------------------ +:class:`~.zvt.contract.factor.Factor` is a computing facility to build *factor* according your mind ——— algorithm. +It reads data from schema, use :class:`~.zvt.contract.factor.Transformer`, :class:`~.zvt.contract.factor.Accumulator` +or your custom logic to compute, and save the result to new schema if need. +It also provides a standard way to evaluate the targets which could be used by :class:`~.zvt.factors.target_selector.TargetSelector` +and :class:`~.zvt.trader.trader.Trader` for backtesting or real trading. + +Transformer +------------------------------ +Computing factor which depends on input data only. +Here is an example: :class:`~.zvt.factors.algorithm.MaTransformer` + +Accumulator +------------------------------ +Computing factor which depends on input data and previous result of the factor. 
+Here is an example: :class:`~.zvt.factors.ma.ma_stats_factor.MaStatsAccumulator.` + +Let's have a look by example: + +:: + + >>> from zvt.factors import GoldCrossFactor + >>> from zvt.domain import Stock1dHfqKdata + >>> entity_ids = ["stock_sz_000338"] + >>> Stock1dHfqKdata.record_data(entity_ids=entity_ids, provider="em") + >>> factor = GoldCrossFactor(entity_ids=entity_ids, provider="em", start_timestamp="2018-01-01") + >>> print(factor.factor_df) + >>> print(factor.result_df) + >>> factor.draw(show=True) + level turnover high id open low entity_id timestamp close turnover_rate volume diff dea macd live bull live_count + entity_id timestamp + stock_sz_000338 2018-01-02 1d 8.325588e+08 145.97 stock_sz_000338_2018-01-02 141.21 141.06 stock_sz_000338 2018-01-02 145.67 0.0225 972471.0 NaN NaN NaN -1 False -1 + 2018-01-03 1d 7.530370e+08 147.66 stock_sz_000338_2018-01-03 146.13 144.29 stock_sz_000338 2018-01-03 144.44 0.0202 870225.0 NaN NaN NaN -1 False -2 + 2018-01-04 1d 4.917067e+08 145.51 stock_sz_000338_2018-01-04 144.75 143.67 stock_sz_000338 2018-01-04 145.21 0.0133 574335.0 NaN NaN NaN -1 False -3 + 2018-01-05 1d 5.282211e+08 146.59 stock_sz_000338_2018-01-05 146.44 143.21 stock_sz_000338 2018-01-05 143.21 0.0143 616244.0 NaN NaN NaN -1 False -4 + 2018-01-08 1d 1.255871e+09 150.43 stock_sz_000338_2018-01-08 143.82 143.82 stock_sz_000338 2018-01-08 150.12 0.0331 1426567.0 NaN NaN NaN -1 False -5 + ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... 
+ 2022-01-17 1d 8.546921e+08 298.71 stock_sz_000338_2022-01-17 296.26 293.49 stock_sz_000338 2022-01-17 297.64 0.0100 504866.0 -1.386687 1.781615 -6.336603 -1 False -12 + 2022-01-18 1d 1.064735e+09 302.71 stock_sz_000338_2022-01-18 298.10 296.10 stock_sz_000338 2022-01-18 300.87 0.0124 622455.0 -1.694421 1.086407 -5.561657 -1 False -13 + 2022-01-19 1d 1.049195e+09 304.24 stock_sz_000338_2022-01-19 299.64 298.56 stock_sz_000338 2022-01-19 299.48 0.0121 610096.0 -2.027097 0.463707 -4.981607 -1 False -14 + 2022-01-20 1d 1.361764e+09 299.18 stock_sz_000338_2022-01-20 298.10 290.11 stock_sz_000338 2022-01-20 294.87 0.0161 812949.0 -2.632389 -0.155513 -4.953753 -1 False -15 + 2022-01-21 1d 1.234360e+09 293.34 stock_sz_000338_2022-01-21 292.72 284.58 stock_sz_000338 2022-01-21 287.04 0.0150 754156.0 -3.701237 -0.864657 -5.673159 -1 False -16 + + [987 rows x 17 columns] + filter_result + entity_id timestamp + stock_sz_000338 2018-01-02 False + 2018-01-03 False + 2018-01-04 False + 2018-01-05 False + 2018-01-08 False + ... ... + 2022-01-17 False + 2022-01-18 False + 2022-01-19 False + 2022-01-20 False + 2022-01-21 False + + [987 rows x 1 columns] + +.. image:: ../_static/factor_draw.png + +Follow :ref:`Extending factor ` to do the funny part. + + +TargetSelector +------------------------------ +The class select targets according to Factors. +You could calculate factors in the whole market and use selector to choose the targets. 
+ +:: + + from zvt.contract import IntervalLevel + from zvt.factors.target_selector import TargetSelector + from zvt.factors.ma.ma_factor import CrossMaFactor + + entity_ids = ["stock_sz_000338"] + entity_type = "stock" + start_timestamp = "2018-01-01" + end_timestamp = "2019-06-30" + my_selector = TargetSelector( + entity_ids=entity_ids, entity_schema=entity_type, start_timestamp=start_timestamp, end_timestamp=end_timestamp + ) + # add the factors + my_selector.add_factor( + CrossMaFactor( + entity_provider="em", + provider="em", + entity_ids=entity_ids, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + computing_window=10, + windows=[5, 10], + need_persist=False, + level=IntervalLevel.LEVEL_1DAY, + adjust_type="hfq", + ) + ) + my_selector.run() + print(my_selector.open_long_df) + print(my_selector.open_short_df) + my_selector.get_open_long_targets("2019-06-27") + +If not set entity_ids arguments, the selected targets would be in whole market. +And it provides get_open_long_targets function to select targets on the timestamp. +For multiple targets backtesting, this pre computed factor would be very fast. \ No newline at end of file diff --git a/docs/source/factor/index.rst b/docs/source/factor/index.rst new file mode 100644 index 00000000..2cedeac9 --- /dev/null +++ b/docs/source/factor/index.rst @@ -0,0 +1,17 @@ +========== +Factor +========== + +| Without factor, the heart of quant is empty. +| So, what exactly is factor? +| zvt makes a concise and unified abstraction of factor: + + In the world of zvt, factor is data describing market. + It's computed from from Schema, and save as new Schema if need. + +.. toctree:: + :maxdepth: 2 + + factor_concepts + extending_factor + diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 00000000..86d36672 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,25 @@ +Welcome to zvt +============== + +Contents +-------- + +.. 
toctree:: + :maxdepth: 2 + + install + intro + data/index + factor/index + trader/index + ml/index + drawer/index + contributor + api/index + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/source/install.rst b/docs/source/install.rst new file mode 100644 index 00000000..e45d0d5f --- /dev/null +++ b/docs/source/install.rst @@ -0,0 +1,38 @@ +.. _install: + +Installation of zvt +======================== + +This part of the documentation covers the installation of zvt. +The first step to using any software package is getting it properly installed. + + +Python version support +---------------------- + +Officially Python 3.7, and 3.8. + + +$ python -m pip install -U zvt +-------------------------------- + +To install zvt, simply run this simple command in your terminal of choice:: + + $ python -m pip install -U zvt + + +Get the Source Code +------------------- + +zvt is actively developed on GitHub, where the code is +`always available `_. + +You can clone the public repository:: + + $ git clone git://github.com/zvtvz/zvt.git + +Once you have a copy of the source, you can embed it in your own Python +package, or install it into your site-packages easily:: + + $ cd zvt + $ python -m pip install . diff --git a/docs/source/intro.rst b/docs/source/intro.rst new file mode 100644 index 00000000..fa842bae --- /dev/null +++ b/docs/source/intro.rst @@ -0,0 +1,36 @@ +========== +Intro +========== + +This is a short introduction to zvt, you would learn the basic usage and +glance a global view here. + +Rethink market and programming +------------------------------ +For practical trading, complex algorithm is fragile, complex algorithm building +on complex facility is more fragile, complex algorithm building on complex +facility by complex team is more and more fragile. 
+ +zvt want to provide a simple facility for building straightforward algorithm, it +should be: + +* **use the most basic programming concepts** + +* **concise abstraction of the market** + +* **correctness is obvious** + +Core concepts building zvt +------------------------------ +| Technologies come and technologies go, but market insight is forever. +| Your world is built by core concepts inside you, so it's you. +| zvt world is built by core concepts inside market, so it's zvt. +| We would show how core concepts building zvt in four aspects: + +.. toctree:: + :maxdepth: 2 + + data/index + factor/index + trader/index + ml/index diff --git a/docs/source/ml/index.rst b/docs/source/ml/index.rst new file mode 100644 index 00000000..57b3676b --- /dev/null +++ b/docs/source/ml/index.rst @@ -0,0 +1,30 @@ +==================== +Machine learning +==================== + +| In today's world, you're not fashion without machine learning. +| So, what could zvt do with ML? +| Being a fashion-feeling person, zvt love ML in this way: + + zvt provide a simple facility to define X and y. + you could adapt any sophisticated ML library to zvt. + +Tagger +------------------------------ +:class:`~.zvt.tag.tag.Tagger` is a computing facility for classifying +TradableEntity by different dimensions as tag which could be used as +ml category feature. + +People are divided into groups, things are grouped together. + +.. image:: ../_static/tag.png + +The most important part for trading is which category is suitable to trade +now. ML could help you find which category perform better in different cases +——— deterministic factor. + +The tag could be **priori** or calculated from characteristics of the entity ——— some factor. + +MLMachine +------------------------------ +the ml engine. 
diff --git a/docs/source/trader/index.rst b/docs/source/trader/index.rst new file mode 100644 index 00000000..02d1f426 --- /dev/null +++ b/docs/source/trader/index.rst @@ -0,0 +1,16 @@ +========== +Trader +========== + +| What we want in the end is to trade and make money. +| So, what exactly is Trader? + + Trader is a simple facility helping you backtest and generate trading signals + using Factor, TargetSelector, MLMachine or your custom free-style algorithm. + +.. toctree:: + :maxdepth: 2 + + trader_concepts + extending_trader + diff --git a/docs/source/trader/trader_concepts.rst b/docs/source/trader/trader_concepts.rst new file mode 100644 index 00000000..69a1029c --- /dev/null +++ b/docs/source/trader/trader_concepts.rst @@ -0,0 +1,11 @@ +==================== +Trader +==================== + +Trader +------------------------------ +The backtest engine using TargetSelector, MLMachine or free style. + +TradingSignal +------------------------------ +The signal contains information about how to trade. \ No newline at end of file diff --git a/docs/source/usage.rst b/docs/source/usage.rst new file mode 100644 index 00000000..310067c3 --- /dev/null +++ b/docs/source/usage.rst @@ -0,0 +1,23 @@ +==================== +Usage +==================== + +Here are some use cases of zvt. + +Data +------------------------------ + +Factor +------------------------------ + +Backtesting +------------------------------ + +Machine learning +------------------------------ + +Draw by intent +------------------------------ + +Informer +------------------------------ \ No newline at end of file diff --git a/examples/README.MD b/examples/README.MD new file mode 100644 index 00000000..4fdadade --- /dev/null +++ b/examples/README.MD @@ -0,0 +1 @@ +The examples are kept up to date with the master source code. 
diff --git a/examples/recorders/__init__.py b/examples/data_runner/__init__.py similarity index 100% rename from examples/recorders/__init__.py rename to examples/data_runner/__init__.py diff --git a/examples/data_runner/actor_runner.py b/examples/data_runner/actor_runner.py new file mode 100644 index 00000000..8a61e4e2 --- /dev/null +++ b/examples/data_runner/actor_runner.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +import logging + +from apscheduler.schedulers.background import BackgroundScheduler + +from zvt import init_log +from zvt.domain import ( + StockInstitutionalInvestorHolder, + StockTopTenFreeHolder, + StockActorSummary, +) +from zvt.utils.recorder_utils import run_data_recorder + +logger = logging.getLogger(__name__) + +sched = BackgroundScheduler() + + +@sched.scheduled_job("cron", hour=1, minute=00, day_of_week=2) +def record_actor_data(data_provider="em", entity_provider="em"): + run_data_recorder( + domain=StockInstitutionalInvestorHolder, + data_provider=data_provider, + entity_provider=entity_provider, + day_data=True, + ) + run_data_recorder( + domain=StockTopTenFreeHolder, data_provider=data_provider, entity_provider=entity_provider, day_data=True + ) + run_data_recorder( + domain=StockActorSummary, data_provider=data_provider, entity_provider=entity_provider, day_data=True + ) + + +if __name__ == "__main__": + init_log("actor_runner.log") + + record_actor_data() + + sched.start() + + sched._thread.join() diff --git a/examples/data_runner/finance_runner.py b/examples/data_runner/finance_runner.py new file mode 100644 index 00000000..5966700f --- /dev/null +++ b/examples/data_runner/finance_runner.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +import logging + +from apscheduler.schedulers.background import BackgroundScheduler + +from zvt import init_log +from zvt.domain import ( + Stock, + StockDetail, + FinanceFactor, + BalanceSheet, + IncomeStatement, + CashFlowStatement, +) +from zvt.utils.recorder_utils import run_data_recorder + +logger = 
logging.getLogger(__name__) + +sched = BackgroundScheduler() + + +@sched.scheduled_job("cron", hour=1, minute=00, day_of_week=5) +def record_actor_data(data_provider="eastmoney", entity_provider="eastmoney"): + run_data_recorder(domain=Stock, data_provider=data_provider) + run_data_recorder(domain=StockDetail, data_provider=data_provider) + run_data_recorder(domain=FinanceFactor, data_provider=data_provider, entity_provider=entity_provider, day_data=True) + run_data_recorder(domain=BalanceSheet, data_provider=data_provider, entity_provider=entity_provider, day_data=True) + run_data_recorder( + domain=IncomeStatement, data_provider=data_provider, entity_provider=entity_provider, day_data=True + ) + run_data_recorder( + domain=CashFlowStatement, data_provider=data_provider, entity_provider=entity_provider, day_data=True + ) + + +if __name__ == "__main__": + init_log("finance_runner.log") + + record_actor_data() + + sched.start() + + sched._thread.join() diff --git a/examples/data_runner/index_runner.py b/examples/data_runner/index_runner.py new file mode 100644 index 00000000..3751871c --- /dev/null +++ b/examples/data_runner/index_runner.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +import logging + +from apscheduler.schedulers.background import BackgroundScheduler + +from zvt import init_log +from zvt.consts import IMPORTANT_INDEX +from zvt.domain import Index, Index1dKdata, IndexStock +from zvt.utils.recorder_utils import run_data_recorder + +logger = logging.getLogger(__name__) + +sched = BackgroundScheduler() + + +# 自行更改定定时运行时间 +@sched.scheduled_job("cron", hour=1, minute=00, day_of_week=5) +def record_index(): + run_data_recorder(domain=Index, data_provider="exchange") + # 默认只抓取 国证1000 国证2000 国证成长 国证价值 的组成个股 + index_ids = ["index_sz_399311", "index_sz_399303", "index_sz_399370", "index_sz_399371"] + run_data_recorder(domain=IndexStock, data_provider="exchange", entity_provider="exchange", entity_ids=index_ids) + + +@sched.scheduled_job("cron", hour=16, 
minute=20) +def record_index_kdata(): + run_data_recorder(domain=Index, data_provider="em") + run_data_recorder( + domain=Index1dKdata, data_provider="em", entity_provider="em", codes=IMPORTANT_INDEX, day_data=True + ) + + +if __name__ == "__main__": + init_log("index_runner.log") + + record_index() + record_index_kdata() + + sched.start() + + sched._thread.join() diff --git a/examples/data_runner/joinquant_fund_runner.py b/examples/data_runner/joinquant_fund_runner.py new file mode 100644 index 00000000..0749afaf --- /dev/null +++ b/examples/data_runner/joinquant_fund_runner.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +import logging + +from apscheduler.schedulers.background import BackgroundScheduler + +from zvt import init_log +from zvt.domain import Fund, FundStock, StockValuation +from zvt.utils.recorder_utils import run_data_recorder + +logger = logging.getLogger(__name__) + +sched = BackgroundScheduler() + + +# 周6抓取 +@sched.scheduled_job("cron", hour=10, minute=00, day_of_week=5) +def record_fund_data(data_provider="joinquant", entity_provider="joinquant"): + # 基金 + run_data_recorder(domain=Fund, data_provider=data_provider, sleeping_time=0) + # 基金持仓 + run_data_recorder(domain=FundStock, data_provider=data_provider, entity_provider=entity_provider, sleeping_time=0) + # 个股估值 + run_data_recorder( + domain=StockValuation, + data_provider=data_provider, + entity_provider=entity_provider, + sleeping_time=0, + day_data=True, + ) + + +if __name__ == "__main__": + init_log("joinquant_fund_runner.log") + + record_fund_data() + + sched.start() + + sched._thread.join() diff --git a/examples/data_runner/joinquant_kdata_runner.py b/examples/data_runner/joinquant_kdata_runner.py new file mode 100644 index 00000000..902262e2 --- /dev/null +++ b/examples/data_runner/joinquant_kdata_runner.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +import logging + +from apscheduler.schedulers.background import BackgroundScheduler + +from zvt.domain import StockTradeDay + +logger = 
logging.getLogger(__name__) + +sched = BackgroundScheduler() + +# -*- coding: utf-8 -*- +import logging + +from apscheduler.schedulers.background import BackgroundScheduler + +from zvt.utils.recorder_utils import run_data_recorder +from zvt import init_log +from zvt.domain import Stock, Stock1dHfqKdata + +logger = logging.getLogger(__name__) + +sched = BackgroundScheduler() + + +@sched.scheduled_job("cron", hour=15, minute=30) +def record_stock_data(data_provider="joinquant", entity_provider="joinquant"): + # A股标的 + run_data_recorder(domain=Stock, data_provider=data_provider, force_update=False) + # 交易日 + run_data_recorder(domain=StockTradeDay, data_provider=data_provider) + # A股后复权行情 + run_data_recorder( + domain=Stock1dHfqKdata, + data_provider=data_provider, + entity_provider=entity_provider, + day_data=True, + sleeping_time=0, + ) + + +if __name__ == "__main__": + init_log("joinquant_kdata_runner.log") + + record_stock_data() + + sched.start() + + sched._thread.join() diff --git a/examples/data_runner/kdata_runner.py b/examples/data_runner/kdata_runner.py new file mode 100644 index 00000000..256a82ed --- /dev/null +++ b/examples/data_runner/kdata_runner.py @@ -0,0 +1,181 @@ +# -*- coding: utf-8 -*- +import logging + +from apscheduler.schedulers.background import BackgroundScheduler + +from examples.report_utils import inform +from examples.utils import get_hot_topics +from zvt import init_log, zvt_config +from zvt.api.selector import get_entity_ids_by_filter +from zvt.domain import ( + Stock, + Stock1dHfqKdata, + Stockhk, + Stockhk1dHfqKdata, + Block, + Block1dKdata, + BlockCategory, + Index, + Index1dKdata, + StockNews, + LimitUpInfo, +) +from zvt.informer import EmailInformer +from zvt.utils.time_utils import current_date +from zvt.utils.recorder_utils import run_data_recorder + +logger = logging.getLogger(__name__) + +sched = BackgroundScheduler() + + +@sched.scheduled_job("cron", hour=16, minute=30, day_of_week="mon-fri") +def 
record_stock_news(data_provider="em"): + normal_stock_ids = get_entity_ids_by_filter( + provider="em", ignore_delist=True, ignore_st=False, ignore_new_stock=False + ) + + run_data_recorder( + entity_ids=normal_stock_ids, + day_data=True, + domain=StockNews, + data_provider=data_provider, + force_update=False, + sleeping_time=2, + ) + + +def report_limit_up(): + latest_data = LimitUpInfo.query_data(order=LimitUpInfo.timestamp.desc(), limit=1, return_type="domain") + timestamp = latest_data[0].timestamp + df = LimitUpInfo.query_data(start_timestamp=timestamp, end_timestamp=timestamp, columns=["code", "name", "reason"]) + df["reason"] = df["reason"].str.split("+") + print(df) + EmailInformer().send_message(zvt_config["email_username"], f"{timestamp} 热门报告", f"{df}") + + +def report_hot_topics(): + topics_long = get_hot_topics(days_ago=20) + topics_short = get_hot_topics(days_ago=5) + + set1 = set(topics_long.keys()) + set2 = set(topics_short.keys()) + + same = set1 & set2 + print(same) + + old_topics = set1 - set2 + print(old_topics) + new_topics = set2 - set1 + print(new_topics) + + msg = f""" + 一直热门:{same} + ---:{old_topics} + +++:{new_topics} + + 长期统计:{topics_long} + 短期统计:{topics_short} + """ + + print(msg) + EmailInformer().send_message(zvt_config["email_username"], f"{current_date()} 热门报告", msg) + + +@sched.scheduled_job("cron", hour=15, minute=30, day_of_week="mon-fri") +def record_stock_data(data_provider="em", entity_provider="em", sleeping_time=0): + email_action = EmailInformer() + # 涨停数据 + run_data_recorder(domain=LimitUpInfo, data_provider=None, force_update=False) + report_limit_up() + + # A股指数 + run_data_recorder(domain=Index, data_provider=data_provider, force_update=False) + # A股指数行情 + run_data_recorder( + domain=Index1dKdata, + data_provider=data_provider, + entity_provider=entity_provider, + day_data=True, + sleeping_time=sleeping_time, + ) + + # 板块(概念,行业) + run_data_recorder(domain=Block, entity_provider=entity_provider, 
data_provider=entity_provider, force_update=False) + # 板块行情(概念,行业) + run_data_recorder( + domain=Block1dKdata, + entity_provider=entity_provider, + data_provider=entity_provider, + day_data=True, + sleeping_time=sleeping_time, + ) + # run_data_recorder( + # domain=BlockStock, + # entity_provider=entity_provider, + # data_provider=entity_provider, + # sleeping_time=sleeping_time, + # ) + + # 报告新概念和行业 + df = Block.query_data( + filters=[Block.category == BlockCategory.concept.value], + order=Block.list_date.desc(), + index="entity_id", + limit=7, + ) + + inform( + action=email_action, + entity_ids=df.index.tolist(), + target_date=current_date(), + title="report 新概念", + entity_provider=entity_provider, + entity_type="block", + em_group=None, + em_group_over_write=False, + ) + + # A股标的 + run_data_recorder(domain=Stock, data_provider=data_provider, force_update=False) + # A股后复权行情 + normal_stock_ids = get_entity_ids_by_filter( + provider="em", ignore_delist=True, ignore_st=False, ignore_new_stock=False + ) + + run_data_recorder( + entity_ids=normal_stock_ids, + domain=Stock1dHfqKdata, + data_provider=data_provider, + entity_provider=entity_provider, + day_data=True, + sleeping_time=sleeping_time, + return_unfinished=True, + ) + + +@sched.scheduled_job("cron", hour=16, minute=30, day_of_week="mon-fri") +def record_stockhk_data(data_provider="em", entity_provider="em", sleeping_time=2): + # 港股标的 + run_data_recorder(domain=Stockhk, data_provider=data_provider, force_update=False) + # 港股后复权行情 + df = Stockhk.query_data(filters=[Stockhk.south == True], index="entity_id") + run_data_recorder( + domain=Stockhk1dHfqKdata, + entity_ids=df.index.tolist(), + data_provider=data_provider, + entity_provider=entity_provider, + day_data=True, + sleeping_time=sleeping_time, + ) + + +if __name__ == "__main__": + init_log("kdata_runner.log") + + record_stock_data() + record_stockhk_data() + + sched.start() + + sched._thread.join() diff --git a/examples/data_runner/sina_data_runner.py 
b/examples/data_runner/sina_data_runner.py new file mode 100644 index 00000000..0f91fa4f --- /dev/null +++ b/examples/data_runner/sina_data_runner.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +import logging + +from apscheduler.schedulers.background import BackgroundScheduler + +from zvt import init_log +from zvt.domain import * +from zvt.utils.recorder_utils import run_data_recorder + +logger = logging.getLogger(__name__) + +sched = BackgroundScheduler() + + +@sched.scheduled_job("cron", hour=15, minute=30, day_of_week=3) +def record_block(): + run_data_recorder(domain=Block, data_provider="sina") + run_data_recorder(domain=Block, data_provider="sina", entity_provider="sina") + + +@sched.scheduled_job("cron", hour=15, minute=30) +def record_money_flow(): + run_data_recorder(domain=BlockMoneyFlow, data_provider="sina", entity_provider="sina", day_data=True) + + +if __name__ == "__main__": + init_log("sina_data_runner.log") + + record_block() + record_money_flow() + + sched.start() + + sched._thread.join() diff --git a/examples/data_runner/trading_runner.py b/examples/data_runner/trading_runner.py new file mode 100644 index 00000000..ab473fde --- /dev/null +++ b/examples/data_runner/trading_runner.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +import logging + +from apscheduler.schedulers.background import BackgroundScheduler +from sqlalchemy import or_, and_ + +from examples.report_utils import inform +from zvt import init_log +from zvt.api.kdata import get_latest_kdata_date +from zvt.api.selector import get_big_players +from zvt.domain import ( + DragonAndTiger, + Stock1dHfqKdata, +) +from zvt.informer import EmailInformer +from zvt.utils.recorder_utils import run_data_recorder +from zvt.utils.time_utils import date_time_by_interval, current_date, to_pd_timestamp + +logger = logging.getLogger(__name__) + +sched = BackgroundScheduler() + + +@sched.scheduled_job("cron", hour=18, minute=00, day_of_week="mon-fri") +def record_dragon_tiger(data_provider="em", 
entity_provider="em", sleeping_time=2): + # 龙虎榜数据 + run_data_recorder( + domain=DragonAndTiger, + data_provider=data_provider, + entity_provider=entity_provider, + day_data=True, + sleeping_time=sleeping_time, + ) + + email_action = EmailInformer() + # recent year + start_timestamp = date_time_by_interval(current_date(), -400) + # 最近一年牛x的营业部 + players = get_big_players(start_timestamp=start_timestamp) + + # 最近30天有牛x的营业部上榜的个股 + recent_date = date_time_by_interval(current_date(), -30) + selected = [] + for player in players: + filters = [ + or_( + and_(DragonAndTiger.dep1 == player, DragonAndTiger.dep1_rate >= 5), + and_(DragonAndTiger.dep2 == player, DragonAndTiger.dep2_rate >= 5), + and_(DragonAndTiger.dep3 == player, DragonAndTiger.dep3_rate >= 5), + and_(DragonAndTiger.dep4 == player, DragonAndTiger.dep4_rate >= 5), + and_(DragonAndTiger.dep5 == player, DragonAndTiger.dep5_rate >= 5), + ) + ] + df = DragonAndTiger.query_data( + start_timestamp=recent_date, + filters=filters, + columns=[DragonAndTiger.timestamp, DragonAndTiger.entity_id, DragonAndTiger.code, DragonAndTiger.name], + index="entity_id", + ) + selected = selected + df.index.tolist() + + if selected: + selected = list(set(selected)) + + target_date = get_latest_kdata_date(provider=data_provider, entity_type="stock", adjust_type="hfq") + df = Stock1dHfqKdata.query_data( + provider=data_provider, + entity_ids=selected, + filters=[ + Stock1dHfqKdata.turnover_rate > 0.02, + Stock1dHfqKdata.timestamp == to_pd_timestamp(target_date), + Stock1dHfqKdata.turnover > 300000000, + ], + index=["entity_id"], + ) + inform( + action=email_action, + entity_ids=df.index.tolist(), + target_date=current_date(), + title="report 龙虎榜", + entity_provider=entity_provider, + entity_type="stock", + em_group="重要指数", + em_group_over_write=False, + ) + + +if __name__ == "__main__": + init_log("trading_runner.log") + + record_dragon_tiger() + + sched.start() + + sched._thread.join() diff --git a/examples/factors/boll_factor.py 
b/examples/factors/boll_factor.py new file mode 100644 index 00000000..aef71d36 --- /dev/null +++ b/examples/factors/boll_factor.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +from typing import Optional, List + +import pandas as pd +from ta.volatility import BollingerBands + +from zvt.contract.factor import Transformer +from zvt.factors.technical_factor import TechnicalFactor + + +class BollTransformer(Transformer): + def transform_one(self, entity_id, df: pd.DataFrame) -> pd.DataFrame: + indicator_bb = BollingerBands(close=df["close"], window=20, window_dev=2) + + # Add Bollinger Bands features + df["bb_bbm"] = indicator_bb.bollinger_mavg() + df["bb_bbh"] = indicator_bb.bollinger_hband() + df["bb_bbl"] = indicator_bb.bollinger_lband() + + # Add Bollinger Band high indicator + df["bb_bbhi"] = indicator_bb.bollinger_hband_indicator() + + # Add Bollinger Band low indicator + df["bb_bbli"] = indicator_bb.bollinger_lband_indicator() + + # Add Width Size Bollinger Bands + df["bb_bbw"] = indicator_bb.bollinger_wband() + + # Add Percentage Bollinger Bands + df["bb_bbp"] = indicator_bb.bollinger_pband() + return df + + +class BollFactor(TechnicalFactor): + transformer = BollTransformer() + + def drawer_factor_df_list(self) -> Optional[List[pd.DataFrame]]: + return [self.factor_df[["bb_bbm", "bb_bbh", "bb_bbl"]]] + + def compute_result(self): + super().compute_result() + self.result_df = (self.factor_df["bb_bbli"] - self.factor_df["bb_bbhi"]).to_frame(name="filter_result") + self.result_df[self.result_df == 0] = None + self.result_df[self.result_df == 1] = True + self.result_df[self.result_df == -1] = False + + +if __name__ == "__main__": + from zvt.domain import Stock1dHfqKdata + + provider = "em" + entity_ids = ["stock_sz_000338", "stock_sh_601318"] + Stock1dHfqKdata.record_data(entity_ids=entity_ids, provider=provider) + factor = BollFactor( + entity_ids=entity_ids, provider=provider, entity_provider=provider, start_timestamp="2019-01-01" + ) + factor.draw(show=True) + + 
from zvt.domain import Stock30mHfqKdata + + provider = "em" + entity_ids = ["stock_sz_000338", "stock_sh_601318"] + + Stock30mHfqKdata.record_data(entity_ids=entity_ids, provider=provider) + factor = BollFactor( + entity_ids=entity_ids, provider=provider, entity_provider=provider, start_timestamp="2021-01-01" + ) + factor.draw(show=True) diff --git a/examples/factors/fundamental_selector.py b/examples/factors/fundamental_selector.py index 90cf0129..7495c825 100644 --- a/examples/factors/fundamental_selector.py +++ b/examples/factors/fundamental_selector.py @@ -1,31 +1,39 @@ # -*- coding: utf-8 -*- from zvt.domain import BalanceSheet -from zvt.factors.fundamental import GoodCompanyFactor +from zvt.factors.fundamental.finance_factor import GoodCompanyFactor from zvt.factors.target_selector import TargetSelector class FundamentalSelector(TargetSelector): - def init_factors(self, entity_ids, entity_schema, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, - level): + def init_factors(self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, level): # 核心资产=(高ROE 高现金流 高股息 低应收 低资本开支 低财务杠杆 有增长) # 高roe 高现金流 低财务杠杆 有增长 - factor1 = GoodCompanyFactor(entity_ids=entity_ids, codes=codes, the_timestamp=the_timestamp, - start_timestamp=start_timestamp, end_timestamp=end_timestamp, provider='eastmoney') + factor1 = GoodCompanyFactor( + entity_ids=entity_ids, + codes=codes, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + provider="eastmoney", + ) - self.filter_factors.append(factor1) + self.factors.append(factor1) # 高股息 低应收 - factor2 = GoodCompanyFactor(data_schema=BalanceSheet, entity_ids=entity_ids, codes=codes, - the_timestamp=the_timestamp, - columns=[BalanceSheet.accounts_receivable], - filters=[ - BalanceSheet.accounts_receivable <= 0.3 * BalanceSheet.total_current_assets], - start_timestamp=start_timestamp, end_timestamp=end_timestamp, provider='eastmoney', - col_period_threshold=None) - 
self.filter_factors.append(factor2) + factor2 = GoodCompanyFactor( + data_schema=BalanceSheet, + entity_ids=entity_ids, + codes=codes, + columns=[BalanceSheet.accounts_receivable], + filters=[BalanceSheet.accounts_receivable <= 0.3 * BalanceSheet.total_current_assets], + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + provider="eastmoney", + col_period_threshold=None, + ) + self.factors.append(factor2) -if __name__ == '__main__': - selector: TargetSelector = FundamentalSelector(start_timestamp='2015-01-01', end_timestamp='2019-06-30') +if __name__ == "__main__": + selector: TargetSelector = FundamentalSelector(start_timestamp="2015-01-01", end_timestamp="2019-06-30") selector.run() - print(selector.get_targets('2019-06-30')) + print(selector.get_targets("2019-06-30")) diff --git a/examples/factors/tech_factor.py b/examples/factors/tech_factor.py new file mode 100644 index 00000000..544aa4e5 --- /dev/null +++ b/examples/factors/tech_factor.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- + +from typing import Type, List, Union + +import pandas as pd + +from zvt.contract import AdjustType, TradableEntity, IntervalLevel +from zvt.contract.factor import Transformer, Accumulator +from zvt.domain import Stock +from zvt.factors.macd.macd_factor import MacdFactor +from zvt.factors.transformers import CrossMaTransformer + + +class BullAndUpFactor(MacdFactor): + def __init__( + self, + entity_schema: Type[TradableEntity] = Stock, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + columns: List = None, + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + 
fill_method: str = "ffill", + effective_number: int = None, + transformer: Transformer = None, + accumulator: Accumulator = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = False, + adjust_type: Union[AdjustType, str] = None, + turnover_threshold=400000000, + turnover_rate_threshold=0.02, + ) -> None: + self.turnover_threshold = turnover_threshold + self.turnover_rate_threshold = turnover_rate_threshold + + super().__init__( + entity_schema, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + transformer, + accumulator, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + adjust_type, + ) + + def compute_result(self): + super().compute_result() + t = CrossMaTransformer(windows=[5, 120, 250]) + self.factor_df = t.transform(self.factor_df) + s = (self.factor_df["turnover"] > self.turnover_threshold) & ( + self.factor_df["turnover_rate"] > self.turnover_rate_threshold + ) + self.result_df = (self.factor_df["filter_result"] & self.factor_df["bull"] & s).to_frame(name="filter_result") diff --git a/examples/hot.json b/examples/hot.json new file mode 100644 index 00000000..563d00ac --- /dev/null +++ b/examples/hot.json @@ -0,0 +1,67 @@ +{ + "减肥药": [ + "减肥药" + ], + "房地产": [ + "房地产", + "新型城镇化", + "棚改", + "建材" + ], + "新型工业化": [ + "新型工业化", + "工业母机" + ], + "华为": [ + "华为", + "mate60 pro,mate", + "星闪", + "问界", + "麒麟", + "昇腾", + "鸿蒙" + ], + "新能源": [ + "新能源", + "锂电,锂电池", + "钠离子电池", + "光伏", + "太阳能", + "储能", + "TOPCON电池", + "风电", + "核电" + ], + "新能车": [ + "新能车,新能源汽车", + "整车,汽车整车", + "汽车零部件,汽车零件", + "无人驾驶", + "压铸一体化,一体化压铸" + ], + "人工智能": [ + "人工智能,AI", + "GPT,CHATGPT", + "算力" + ], + "机器人": [ + "机器人", + "减速器", + "伺服,伺服系统", + "控制系统", + "电机" + 
], + "核心资产": [ + "核心资产", + "消费,白酒,食品,饮料", + "白马", + "沪深300", + "基金重仓", + "上证50" + ], + "一带一路": [ + "一带一路", + "人民币国际化", + "跨境支付" + ] +} diff --git a/tests/domain/__init__.py b/examples/intent/__init__.py similarity index 100% rename from tests/domain/__init__.py rename to examples/intent/__init__.py diff --git a/examples/intent/intent.py b/examples/intent/intent.py new file mode 100644 index 00000000..762df26c --- /dev/null +++ b/examples/intent/intent.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +from zvt.api.intent import compare +from zvt.domain import Indexus1dKdata, Index, Indexus, Index1dKdata, Currency1dKdata +from zvt.domain import TreasuryYield + + +def china_vs_us_stock(): + # 上证,道琼斯指数 + Index.record_data() + Indexus.record_data() + Index1dKdata.record_data(entity_id="index_sh_000001") + Indexus1dKdata.record_data(entity_id="indexus_us_SPX") + compare(entity_ids=["index_sh_000001", "indexus_us_SPX"], start_timestamp="2000-01-01", scale_value=100) + + +def us_yield_and_stock(): + # 美债收益率,道琼斯指数 + entity_ids = ["country_galaxy_US", "indexus_us_SPX"] + compare( + entity_ids=entity_ids, + start_timestamp="1990-01-01", + scale_value=None, + schema_map_columns={TreasuryYield: ["yield_2", "yield_5"], Indexus1dKdata: ["close"]}, + ) + + +def commodity_and_stock(): + # 江西铜业,沪铜 + entity_ids = ["stock_sh_600362", "future_shfe_CU"] + compare( + entity_ids=entity_ids, + start_timestamp="2005-01-01", + scale_value=100, + ) + + +def compare_metal(): + # 沪铜,沪铝,螺纹钢 + entity_ids = ["future_shfe_CU", "future_shfe_AL", "future_shfe_RB"] + compare( + entity_ids=entity_ids, + start_timestamp="2009-04-01", + scale_value=100, + ) + + +def compare_udi_and_stock(): + # 美股指数 + # Indexus.record_data() + entity_ids = ["indexus_us_NDX", "indexus_us_SPX", "indexus_us_UDI"] + # Indexus1dKdata.record_data(entity_ids=entity_ids, sleeping_time=0) + compare( + entity_ids=entity_ids, + start_timestamp="2015-01-01", + scale_value=100, + schema_map_columns={Indexus1dKdata: ["close"]}, + ) + + 
+def compare_cny_and_stock(): + Currency1dKdata.record_data(entity_id="currency_forex_USDCNYC") + entity_ids = ["index_sh_000001", "currency_forex_USDCNYC"] + compare( + entity_ids=entity_ids, + start_timestamp="2005-01-01", + scale_value=100, + schema_map_columns={Currency1dKdata: ["close"], Index1dKdata: ["close"]}, + ) + + +if __name__ == "__main__": + # compare_kline() + # us_yield_and_stock() + # commodity_and_stock() + # compare_metal() + # compare_udi_and_stock() + compare_cny_and_stock() diff --git a/examples/main_line_hidden_tags.json b/examples/main_line_hidden_tags.json new file mode 100644 index 00000000..66b7decf --- /dev/null +++ b/examples/main_line_hidden_tags.json @@ -0,0 +1,3 @@ +[ + "次新股" +] \ No newline at end of file diff --git a/examples/main_line_tags.json b/examples/main_line_tags.json new file mode 100644 index 00000000..ddaa2d55 --- /dev/null +++ b/examples/main_line_tags.json @@ -0,0 +1,5 @@ +[ + "智能机器", + "半导体", + "AI" +] \ No newline at end of file diff --git a/examples/migration.py b/examples/migration.py new file mode 100644 index 00000000..0999ddd9 --- /dev/null +++ b/examples/migration.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +from datetime import datetime +from typing import Dict + +from pydantic import BaseModel, ConfigDict +from sqlalchemy import Column, String, JSON +from sqlalchemy.orm import declarative_base + +from zvt.contract.api import get_db_session +from zvt.contract.register import register_schema +from zvt.contract.schema import Mixin + +ZvtInfoBase = declarative_base() + + +class User(Mixin, ZvtInfoBase): + __tablename__ = "user" + added_col = Column(String) + json_col = Column(JSON) + + +class UserModel(BaseModel): + model_config = ConfigDict(from_attributes=True) + + id: str + entity_id: str + timestamp: datetime + added_col: str + json_col: Dict + + +register_schema(providers=["zvt"], db_name="test", schema_base=ZvtInfoBase) + +if __name__ == "__main__": + user_model = UserModel( + id="user_cn_jack_2020-01-01", 
+ entity_id="user_cn_jack", + timestamp="2020-01-01", + added_col="test", + json_col={"a": 1}, + ) + session = get_db_session(provider="zvt", data_schema=User) + + user = session.query(User).filter(User.id == "user_cn_jack_2020-01-01").first() + print(UserModel.validate(user)) diff --git a/zvt/ui/apps/__init__.py b/examples/ml/__init__.py similarity index 100% rename from zvt/ui/apps/__init__.py rename to examples/ml/__init__.py diff --git a/examples/ml/sgd.py b/examples/ml/sgd.py new file mode 100644 index 00000000..4b0fea85 --- /dev/null +++ b/examples/ml/sgd.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +from sklearn.linear_model import SGDClassifier, SGDRegressor +from sklearn.pipeline import make_pipeline +from sklearn.preprocessing import StandardScaler + +from zvt.ml import MaStockMLMachine + + +def sgd_classification(): + machine = MaStockMLMachine(data_provider="em", entity_ids=["stock_sz_000001"], label_method="behavior_cls") + clf = make_pipeline(StandardScaler(), SGDClassifier(max_iter=1000, tol=1e-3)) + machine.train(model=clf) + machine.predict() + machine.draw_result(entity_id="stock_sz_000001") + + +def sgd_regressor(): + machine = MaStockMLMachine(data_provider="em", entity_ids=["stock_sz_000001"], label_method="raw") + reg = make_pipeline(StandardScaler(), SGDRegressor(max_iter=1000, tol=1e-3)) + machine.train(model=reg) + machine.predict() + machine.draw_result(entity_id="stock_sz_000001") + + +if __name__ == "__main__": + sgd_classification() + sgd_regressor() diff --git a/examples/query_snippet.py b/examples/query_snippet.py new file mode 100644 index 00000000..238728e8 --- /dev/null +++ b/examples/query_snippet.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import func + +from zvt.api.selector import get_entity_ids_by_filter +from zvt.contract import Exchange +from zvt.domain import Stock, BlockStock +from zvt.recorders.em import em_api +from zvt.tag.tag_schemas import StockTags + + +def query_json(): + + df = 
StockTags.query_data( + filters=[func.json_extract(StockTags.sub_tags, '$."低空经济"') != None], columns=[StockTags.sub_tags] + ) + print(df) + + +def get_stocks_has_tag(): + df = StockTags.query_data(filters=[StockTags.latest.is_(True)], columns=[StockTags.entity_id]) + return df["entity_id"].tolist() + + +def get_stocks_without_tag(): + entity_ids = get_entity_ids_by_filter(provider="em", ignore_delist=True, ignore_st=True, ignore_new_stock=False) + stocks_has_tag = get_stocks_has_tag() + return list(set(entity_ids) - set(stocks_has_tag)) + + +def get_all_delist_stocks(): + stocks = [] + df1 = em_api.get_tradable_list(entity_type="stock", exchange=Exchange.sh) + stocks = stocks + df1["entity_id"].tolist() + df2 = em_api.get_tradable_list(entity_type="stock", exchange=Exchange.sz) + stocks = stocks + df2["entity_id"].tolist() + df3 = em_api.get_tradable_list(entity_type="stock", exchange=Exchange.bj) + stocks = stocks + df3["entity_id"].tolist() + return stocks + + +def get_block_stocks(name="低空经济"): + df = BlockStock.query_data(provider="em", filters=[BlockStock.name == name], columns=[BlockStock.stock_id]) + return df["stock_id"].tolist() + + +def get_sub_tag_stocks(tag="低空经济"): + df = StockTags.query_data( + provider="zvt", + filters=[func.json_extract(StockTags.sub_tags, f'$."{tag}"') != None], + columns=[StockTags.entity_id], + ) + return df["entity_id"].tolist() + + +if __name__ == "__main__": + # a = get_block_stocks() + # b = get_sub_tag_stocks() + # print(set(a) - set(b)) + print(Stock.query_data(provider="em", return_type="dict")) diff --git a/examples/recorders/eastmoney_data_runner1.py b/examples/recorders/eastmoney_data_runner1.py deleted file mode 100644 index ef3d4616..00000000 --- a/examples/recorders/eastmoney_data_runner1.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import time - -from apscheduler.schedulers.background import BackgroundScheduler - -from zvt import init_log -from zvt.domain import * -from 
zvt.informer.informer import EmailInformer - -logger = logging.getLogger(__name__) - -sched = BackgroundScheduler() - - -# 自行更改定定时运行时间 -# 这些数据都是些低频分散的数据,每天更新一次即可 -@sched.scheduled_job('cron', hour=2, minute=00, day_of_week=5) -def run(): - while True: - email_action = EmailInformer() - - try: - Stock.record_data(provider='eastmoney') - StockDetail.record_data(provider='eastmoney') - FinanceFactor.record_data(provider='eastmoney') - BalanceSheet.record_data(provider='eastmoney') - IncomeStatement.record_data(provider='eastmoney') - CashFlowStatement.record_data(provider='eastmoney') - - email_action.send_message("5533061@qq.com", 'eastmoney runner1 finished', '') - break - except Exception as e: - msg = f'eastmoney runner1 error:{e}' - logger.exception(msg) - - email_action.send_message("5533061@qq.com", 'eastmoney runner1 error', msg) - time.sleep(60) - - -if __name__ == '__main__': - init_log('eastmoney_data_runner1.log') - - run() - - sched.start() - - sched._thread.join() diff --git a/examples/recorders/eastmoney_data_runner2.py b/examples/recorders/eastmoney_data_runner2.py deleted file mode 100644 index 2d9f4d1f..00000000 --- a/examples/recorders/eastmoney_data_runner2.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import time - -from apscheduler.schedulers.background import BackgroundScheduler - -from zvt import init_log -from zvt.domain import * -from zvt.informer.informer import EmailInformer - -logger = logging.getLogger(__name__) - -sched = BackgroundScheduler() - - -# 自行更改定定时运行时间 -# 这些数据都是些低频分散的数据,每天更新一次即可 -@sched.scheduled_job('cron', hour=2, minute=00, day_of_week=4) -def run(): - while True: - email_action = EmailInformer() - - try: - # DividendFinancing.record_data(provider='eastmoney') - # HolderTrading.record_data(provider='eastmoney') - # ManagerTrading.record_data(provider='eastmoney') - TopTenHolder.record_data(provider='eastmoney') - TopTenTradableHolder.record_data(provider='eastmoney') - - 
email_action.send_message("5533061@qq.com", 'eastmoney runner2 finished', '') - break - except Exception as e: - msg = f'eastmoney runner2 error:{e}' - logger.exception(msg) - - email_action.send_message("5533061@qq.com", 'eastmoney runner2 error', msg) - time.sleep(60) - - -if __name__ == '__main__': - init_log('eastmoney_data_runner2.log') - - run() - - sched.start() - - sched._thread.join() diff --git a/examples/recorders/joinquant_fund_recorder.py b/examples/recorders/joinquant_fund_recorder.py deleted file mode 100644 index 67ce9ed7..00000000 --- a/examples/recorders/joinquant_fund_recorder.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import time - -from apscheduler.schedulers.background import BackgroundScheduler - -from zvt import init_log -from zvt.domain import Fund, FundStock, Stock1wkHfqKdata, StockValuation -from zvt.informer.informer import EmailInformer - -logger = logging.getLogger(__name__) - -sched = BackgroundScheduler() - - -# 周6抓取 -@sched.scheduled_job('cron', hour=10, minute=00, day_of_week=5) -def record_fund(): - while True: - email_action = EmailInformer() - - try: - # 基金和基金持仓数据 - Fund.record_data(provider='joinquant', sleeping_time=1) - FundStock.record_data(provider='joinquant', sleeping_time=1) - # 股票周线后复权数据 - Stock1wkHfqKdata.record_data(provider='joinquant', sleeping_time=0) - - email_action.send_message("5533061@qq.com", 'joinquant record fund finished', '') - break - except Exception as e: - msg = f'joinquant record fund error:{e}' - logger.exception(msg) - - email_action.send_message("5533061@qq.com", 'joinquant record fund error', msg) - time.sleep(60) - - -# 周6抓取 -@sched.scheduled_job('cron', hour=13, minute=00, day_of_week=6) -def record_valuation(): - while True: - email_action = EmailInformer() - - try: - StockValuation.record_data(provider='joinquant', sleeping_time=0, day_data=True) - - email_action.send_message("5533061@qq.com", 'joinquant record valuation finished', '') - break - except 
Exception as e: - msg = f'joinquant record valuation error:{e}' - logger.exception(msg) - - email_action.send_message("5533061@qq.com", 'joinquant record valuation error', msg) - time.sleep(60) - - -if __name__ == '__main__': - init_log('joinquant_fund_runner.log') - - record_fund() - - record_valuation() - - sched.start() - - sched._thread.join() diff --git a/examples/recorders/joinquant_kdata_runner.py b/examples/recorders/joinquant_kdata_runner.py deleted file mode 100644 index 61982fc7..00000000 --- a/examples/recorders/joinquant_kdata_runner.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import time - -from apscheduler.schedulers.background import BackgroundScheduler - -from zvt import init_log -from zvt.domain import Stock, StockTradeDay, Stock1dHfqKdata -from zvt.informer.informer import EmailInformer - -logger = logging.getLogger(__name__) - -sched = BackgroundScheduler() - - -@sched.scheduled_job('cron', hour=6, minute=0) -def record_stock(): - while True: - email_action = EmailInformer() - - try: - Stock.record_data(provider='joinquant', sleeping_time=1) - StockTradeDay.record_data(provider='joinquant', sleeping_time=1) - email_action.send_message("5533061@qq.com", 'joinquant record stock finished', '') - break - except Exception as e: - msg = f'joinquant record stock:{e}' - logger.exception(msg) - - email_action.send_message("5533061@qq.com", 'joinquant record stock error', msg) - time.sleep(60 * 5) - - -@sched.scheduled_job('cron', hour=15, minute=20) -def record_kdata(): - while True: - email_action = EmailInformer() - - try: - # 日线前复权和后复权数据 - # Stock1dKdata.record_data(provider='joinquant', sleeping_time=0) - Stock1dHfqKdata.record_data(provider='joinquant', sleeping_time=0, day_data=True) - # StockMoneyFlow.record_data(provider='joinquant', sleeping_time=0) - # IndexMoneyFlow.record_data(provider='joinquant', sleeping_time=0) - email_action.send_message("5533061@qq.com", 'joinquant record kdata finished', '') - break - 
except Exception as e: - msg = f'joinquant record kdata:{e}' - logger.exception(msg) - - email_action.send_message("5533061@qq.com", 'joinquant record kdata error', msg) - time.sleep(60 * 5) - - -if __name__ == '__main__': - init_log('joinquant_kdata_runner.log') - - record_kdata() - - sched.start() - - sched._thread.join() diff --git a/examples/recorders/joinquant_other_data_runner.py b/examples/recorders/joinquant_other_data_runner.py deleted file mode 100644 index c891ba28..00000000 --- a/examples/recorders/joinquant_other_data_runner.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import time - -from apscheduler.schedulers.background import BackgroundScheduler - -from zvt import init_log -from zvt.domain import * -from zvt.informer.informer import EmailInformer - -logger = logging.getLogger(__name__) - -sched = BackgroundScheduler() - - -# 每天下午17:00抓取 -@sched.scheduled_job('cron', hour=17, minute=00) -def record_margin_trading(): - email_action = EmailInformer() - - try: - MarginTrading.record_data(provider='joinquant', sleeping_time=1) - email_action.send_message("5533061@qq.com", 'joinquant record margin trading finished', '') - except Exception as e: - msg = f'joinquant record margin trading:{e}' - logger.exception(msg) - - email_action.send_message("5533061@qq.com", 'joinquant record margin trading error', msg) - time.sleep(60) - - -# 周6抓取 -@sched.scheduled_job('cron', hour=2, minute=00, day_of_week=5) -def record_valuation(): - while True: - email_action = EmailInformer() - - try: - # 个股估值数据 - StockValuation.record_data(provider='joinquant', sleeping_time=1) - - email_action.send_message("5533061@qq.com", 'joinquant record valuation finished', '') - break - except Exception as e: - msg = f'joinquant record kdata:{e}' - logger.exception(msg) - - email_action.send_message("5533061@qq.com", 'joinquant record valuation error', msg) - time.sleep(60) - - -# 周4抓取 -@sched.scheduled_job('cron', hour=19, minute=00, day_of_week=3) -def 
record_others(): - while True: - email_action = EmailInformer() - - try: - Etf.record_data(provider='joinquant', sleeping_time=1) - EtfStock.record_data(provider='joinquant', sleeping_time=1) - - email_action.send_message("5533061@qq.com", 'joinquant record etf finished', '') - break - except Exception as e: - msg = f'joinquant record etf error:{e}' - logger.exception(msg) - - email_action.send_message("5533061@qq.com", 'joinquant record etf error', msg) - time.sleep(60) - - -if __name__ == '__main__': - init_log('joinquant_other_data_runner.log') - - record_margin_trading() - - sched.start() - - sched._thread.join() diff --git a/examples/recorders/sina_data_runner.py b/examples/recorders/sina_data_runner.py deleted file mode 100644 index b2e1d75b..00000000 --- a/examples/recorders/sina_data_runner.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import time - -from apscheduler.schedulers.background import BackgroundScheduler - -from zvt import init_log -from zvt.domain import * -from zvt.informer.informer import EmailInformer - -logger = logging.getLogger(__name__) - -sched = BackgroundScheduler() - - -@sched.scheduled_job('cron', hour=15, minute=30, day_of_week=3) -def record_block(): - while True: - email_action = EmailInformer() - - try: - Block.record_data(provider='sina') - BlockStock.record_data(provider='sina') - - email_action.send_message("5533061@qq.com", 'sina block finished', '') - break - except Exception as e: - msg = f'sina block error:{e}' - logger.exception(msg) - - email_action.send_message("5533061@qq.com", 'sina block error', msg) - time.sleep(60) - - -@sched.scheduled_job('cron', hour=15, minute=30) -def record_money_flow(): - while True: - email_action = EmailInformer() - - try: - BlockMoneyFlow.record_data(provider='sina') - - email_action.send_message("5533061@qq.com", 'sina money flow finished', '') - break - except Exception as e: - msg = f'sina money flow error:{e}' - logger.exception(msg) - - 
email_action.send_message("5533061@qq.com", 'sina money flow error', msg) - time.sleep(60) - - -if __name__ == '__main__': - init_log('sina_data_runner.log') - - record_block() - record_money_flow() - - sched.start() - - sched._thread.join() diff --git a/examples/report_utils.py b/examples/report_utils.py new file mode 100644 index 00000000..88ed1bf8 --- /dev/null +++ b/examples/report_utils.py @@ -0,0 +1,315 @@ +# -*- coding: utf-8 -*- +import logging +import time +from typing import Type + +from examples.tag_utils import group_stocks_by_tag, get_main_line_tags, get_main_line_hidden_tags +from examples.utils import msg_group_stocks_by_topic +from zvt import zvt_config +from zvt.api.kdata import get_latest_kdata_date, get_kdata_schema, default_adjust_type +from zvt.api.selector import get_limit_up_stocks +from zvt.api.stats import get_top_performance_entities_by_periods, get_top_volume_entities, TopType +from zvt.contract import IntervalLevel +from zvt.contract.api import get_entities, get_entity_schema +from zvt.contract.factor import Factor, TargetType +from zvt.domain import StockNews +from zvt.informer import EmailInformer +from zvt.informer.inform_utils import add_to_eastmoney +from zvt.utils.time_utils import date_time_by_interval + +logger = logging.getLogger("__name__") + + +def inform( + action: EmailInformer, + entity_ids, + target_date, + title, + entity_provider, + entity_type, + em_group, + em_group_over_write, + em_group_over_write_tag=False, + group_by_topic=False, + group_by_tag=True, + special_hidden_tag="北交所", +): + msg = "no targets" + if entity_ids: + entities = get_entities( + provider=entity_provider, entity_type=entity_type, entity_ids=entity_ids, return_type="domain" + ) + entities = [entity for entity in entities if entity.entity_id in entity_ids] + print(len(entities)) + print(len(entity_ids)) + assert len(entities) == len(entity_ids) + + if group_by_topic and (entity_type == "stock"): + StockNews.record_data( + entity_ids=entity_ids, + 
provider="em", + force_update=False, + sleeping_time=0.05, + day_data=True, + ) + + msg = msg_group_stocks_by_topic(entities=entities, threshold=1, days_ago=60) + logger.info(msg) + action.send_message(zvt_config["email_username"], f"{target_date} {title}", msg) + + if group_by_tag and (entity_type == "stock"): + main_line_hidden_tags = get_main_line_hidden_tags() + sorted_entities = group_stocks_by_tag( + entities=entities, hidden_tags=main_line_hidden_tags + [special_hidden_tag] + ) + msg = "" + main_line = [] + others = [] + special = [] + main_line_tags = get_main_line_tags() + for index, (tag, stocks) in enumerate(sorted_entities): + msg = msg + f"^^^^^^ {tag}[{len(stocks)}/{len(entities)}] ^^^^^^\n" + msg = msg + "\n".join([f"{stock.name}({stock.code})" for stock in stocks]) + "\n" + if tag == special_hidden_tag: + special = special + stocks + elif (not main_line_tags) and (tag != "未知") and (index < 3): + main_line = main_line + stocks + elif main_line_tags and (tag in main_line_tags): + main_line = main_line + stocks + elif main_line_hidden_tags and (tag in main_line_hidden_tags): + main_line = main_line + stocks + else: + others = others + stocks + + # 主线 + if main_line: + codes = [entity.code for entity in main_line] + add_to_eastmoney(codes=codes, entity_type=entity_type, group="主线", over_write=em_group_over_write_tag) + + # 其他 + if others: + codes = [entity.code for entity in others] + if not em_group: + em_group = "其他" + add_to_eastmoney(codes=codes, entity_type=entity_type, group=em_group, over_write=em_group_over_write) + # 特别处理 + if special: + codes = [entity.code for entity in special] + add_to_eastmoney( + codes=codes, entity_type=entity_type, group=special_hidden_tag, over_write=em_group_over_write_tag + ) + else: + if em_group: + try: + codes = [entity.code for entity in entities] + add_to_eastmoney( + codes=codes, entity_type=entity_type, group=em_group, over_write=em_group_over_write + ) + except Exception as e: + action.send_message( + 
zvt_config["email_username"], + f"{target_date} {title} error", + f"{target_date} {title} error: {e}", + ) + + infos = [f"{entity.name}({entity.code})" for entity in entities] + msg = "\n".join(infos) + "\n" + + logger.info(msg) + action.send_message(zvt_config["email_username"], f"{target_date} {title}", msg) + + +def report_targets( + factor_cls: Type[Factor], + entity_provider, + data_provider, + title, + entity_type="stock", + informer: EmailInformer = None, + em_group=None, + em_group_over_write=True, + em_group_over_write_tag=False, + filter_by_volume=True, + adjust_type=None, + start_timestamp="2019-01-01", + **factor_kv, +): + logger.info( + f"entity_provider: {entity_provider}, data_provider: {data_provider}, entity_type: {entity_type}, start_timestamp: {start_timestamp}" + ) + error_count = 0 + + while error_count <= 10: + try: + if not adjust_type: + adjust_type = default_adjust_type(entity_type=entity_type) + + target_date = get_latest_kdata_date( + provider=data_provider, entity_type=entity_type, adjust_type=adjust_type + ) + logger.info(f"target_date :{target_date}") + + current_entity_pool = None + if filter_by_volume: + # 成交量 + vol_df = get_top_volume_entities( + entity_type=entity_type, + start_timestamp=date_time_by_interval(target_date, -30), + end_timestamp=target_date, + adjust_type=adjust_type, + pct=0.4, + data_provider=data_provider, + ) + current_entity_pool = vol_df.index.tolist() + logger.info(f"current_entity_pool({len(current_entity_pool)}): {current_entity_pool}") + + kdata_schema = get_kdata_schema(entity_type, level=IntervalLevel.LEVEL_1DAY, adjust_type=adjust_type) + filters = [] + if "turnover_threshold" in factor_kv: + filters = filters + [kdata_schema.turnover >= factor_kv.get("turnover_threshold")] + if "turnover_rate_threshold" in factor_kv: + filters = filters + [kdata_schema.turnover_rate >= factor_kv.get("turnover_rate_threshold")] + if filters: + filters = filters + [kdata_schema.timestamp == target_date] + kdata_df = 
kdata_schema.query_data( + provider=data_provider, filters=filters, columns=["entity_id", "timestamp"], index="entity_id" + ) + if current_entity_pool: + current_entity_pool = set(current_entity_pool) & set(kdata_df.index.tolist()) + else: + current_entity_pool = kdata_df.index.tolist() + + if "entity_ids" in factor_kv: + if current_entity_pool: + current_entity_pool = set(current_entity_pool) & set(factor_kv.pop("entity_ids")) + else: + current_entity_pool = set(factor_kv.pop("entity_ids")) + + # add the factor + entity_schema = get_entity_schema(entity_type=entity_type) + tech_factor = factor_cls( + entity_schema=entity_schema, + entity_provider=entity_provider, + provider=data_provider, + entity_ids=current_entity_pool, + start_timestamp=start_timestamp, + end_timestamp=target_date, + adjust_type=adjust_type, + **factor_kv, + ) + + long_stocks = tech_factor.get_targets(timestamp=target_date, target_type=TargetType.positive) + + inform( + informer, + entity_ids=long_stocks, + target_date=target_date, + title=f"{entity_type} {title}({len(long_stocks)})", + entity_provider=entity_provider, + entity_type=entity_type, + em_group=em_group, + em_group_over_write=em_group_over_write, + em_group_over_write_tag=em_group_over_write_tag, + ) + + break + except Exception as e: + logger.exception("report error:{}".format(e)) + time.sleep(60 * 3) + error_count = error_count + 1 + if error_count == 10: + informer.send_message( + zvt_config["email_username"], + f"report {entity_type}{factor_cls.__name__} error", + f"report {entity_type}{factor_cls.__name__} error: {e}", + ) + + +def report_top_entities( + entity_provider, + data_provider, + periods=None, + ignore_new_stock=True, + ignore_st=True, + entity_ids=None, + entity_type="stock", + adjust_type=None, + top_count=30, + turnover_threshold=100000000, + turnover_rate_threshold=0.02, + informer: EmailInformer = None, + title="最强", + em_group=None, + em_group_over_write=True, + em_group_over_write_tag=False, + 
return_type=TopType.positive, + include_limit_up=False, +): + error_count = 0 + + if not adjust_type: + adjust_type = default_adjust_type(entity_type=entity_type) + + while error_count <= 10: + try: + target_date = get_latest_kdata_date( + provider=data_provider, entity_type=entity_type, adjust_type=adjust_type + ) + + selected, real_period = get_top_performance_entities_by_periods( + entity_provider=entity_provider, + data_provider=data_provider, + periods=periods, + ignore_new_stock=ignore_new_stock, + ignore_st=ignore_st, + entity_ids=entity_ids, + entity_type=entity_type, + adjust_type=adjust_type, + top_count=top_count, + turnover_threshold=turnover_threshold, + turnover_rate_threshold=turnover_rate_threshold, + return_type=return_type, + ) + + if include_limit_up and (entity_type == "stock"): + limit_up_stocks = get_limit_up_stocks(timestamp=target_date) + if limit_up_stocks: + selected = list(set(selected + limit_up_stocks)) + + inform( + informer, + entity_ids=selected, + target_date=target_date, + title=f"{entity_type} {title}({len(selected)})", + entity_provider=entity_provider, + entity_type=entity_type, + em_group=em_group, + em_group_over_write=em_group_over_write, + em_group_over_write_tag=em_group_over_write_tag, + ) + return real_period + except Exception as e: + logger.exception("report error:{}".format(e)) + time.sleep(30) + error_count = error_count + 1 + + +if __name__ == "__main__": + report_top_entities( + entity_type="block", + entity_provider="em", + data_provider="em", + top_count=10, + periods=[365, 750], + ignore_new_stock=False, + ignore_st=False, + adjust_type=None, + turnover_threshold=50000000, + turnover_rate_threshold=0.005, + em_group=None, + em_group_over_write=False, + return_type=TopType.negative, + ) + +# the __all__ is generated +__all__ = ["report_targets", "report_top_entities"] diff --git a/examples/reports/__init__.py b/examples/reports/__init__.py index c16231d8..e41d48b0 100644 --- a/examples/reports/__init__.py +++ 
b/examples/reports/__init__.py @@ -2,16 +2,19 @@ import datetime import json import os +from typing import List from sqlalchemy import or_ +from zvt.api.utils import float_to_pct_str +from zvt.contract import ActorType +from zvt.domain import FinanceFactor, BalanceSheet, IncomeStatement, Stock, StockActorSummary from zvt.utils.pd_utils import pd_is_not_null from zvt.utils.time_utils import to_pd_timestamp, now_time_str -from zvt.domain import FinanceFactor, BalanceSheet, IncomeStatement def get_subscriber_emails(): - emails_file = os.path.abspath(os.path.join(os.path.dirname(__file__), 'subscriber_emails.json')) + emails_file = os.path.abspath(os.path.join(os.path.dirname(__file__), "subscriber_emails.json")) with open(emails_file) as f: return json.load(f) @@ -20,36 +23,46 @@ def risky_company(the_date=to_pd_timestamp(now_time_str()), income_yoy=-0.1, pro codes = [] start_timestamp = to_pd_timestamp(the_date) - datetime.timedelta(130) # 营收降,利润降,流动比率低,速动比率低 - finance_filter = or_(FinanceFactor.op_income_growth_yoy < income_yoy, - FinanceFactor.net_profit_growth_yoy <= profit_yoy, - FinanceFactor.current_ratio < 0.7, - FinanceFactor.quick_ratio < 0.5) - df = FinanceFactor.query_data(entity_ids=entity_ids, start_timestamp=start_timestamp, filters=[finance_filter], - columns=['code']) + finance_filter = or_( + FinanceFactor.op_income_growth_yoy < income_yoy, + FinanceFactor.net_profit_growth_yoy <= profit_yoy, + FinanceFactor.current_ratio < 0.7, + FinanceFactor.quick_ratio < 0.5, + ) + df = FinanceFactor.query_data( + entity_ids=entity_ids, start_timestamp=start_timestamp, filters=[finance_filter], columns=["code"] + ) if pd_is_not_null(df): codes = codes + df.code.tolist() # 高应收,高存货,高商誉 - balance_filter = (BalanceSheet.accounts_receivable + BalanceSheet.inventories + BalanceSheet.goodwill) \ - > BalanceSheet.total_equity - df = BalanceSheet.query_data(entity_ids=entity_ids, start_timestamp=start_timestamp, filters=[balance_filter], - columns=['code']) + 
balance_filter = ( + BalanceSheet.accounts_receivable + BalanceSheet.inventories + BalanceSheet.goodwill + ) > BalanceSheet.total_equity + df = BalanceSheet.query_data( + entity_ids=entity_ids, start_timestamp=start_timestamp, filters=[balance_filter], columns=["code"] + ) if pd_is_not_null(df): codes = codes + df.code.tolist() # 应收>利润*1/2 - df1 = BalanceSheet.query_data(entity_ids=entity_ids, start_timestamp=start_timestamp, - columns=[BalanceSheet.code, BalanceSheet.accounts_receivable]) + df1 = BalanceSheet.query_data( + entity_ids=entity_ids, + start_timestamp=start_timestamp, + columns=[BalanceSheet.code, BalanceSheet.accounts_receivable], + ) if pd_is_not_null(df1): - df1.drop_duplicates(subset='code', keep='last', inplace=True) - df1 = df1.set_index('code', drop=True).sort_index() + df1.drop_duplicates(subset="code", keep="last", inplace=True) + df1 = df1.set_index("code", drop=True).sort_index() - df2 = IncomeStatement.query_data(entity_ids=entity_ids, start_timestamp=start_timestamp, - columns=[IncomeStatement.code, - IncomeStatement.net_profit]) + df2 = IncomeStatement.query_data( + entity_ids=entity_ids, + start_timestamp=start_timestamp, + columns=[IncomeStatement.code, IncomeStatement.net_profit], + ) if pd_is_not_null(df2): - df2.drop_duplicates(subset='code', keep='last', inplace=True) - df2 = df2.set_index('code', drop=True).sort_index() + df2.drop_duplicates(subset="code", keep="last", inplace=True) + df2 = df2.set_index("code", drop=True).sort_index() if pd_is_not_null(df1) and pd_is_not_null(df2): codes = codes + df1[df1.accounts_receivable > df2.net_profit / 2].index.tolist() @@ -57,5 +70,39 @@ def risky_company(the_date=to_pd_timestamp(now_time_str()), income_yoy=-0.1, pro return list(set(codes)) -if __name__ == '__main__': +def stocks_with_info(stocks: List[Stock]): + infos = [] + for stock in stocks: + info = f"{stock.name}({stock.code})" + summary: List[StockActorSummary] = StockActorSummary.query_data( + entity_id=stock.entity_id, + 
order=StockActorSummary.timestamp.desc(), + filters=[StockActorSummary.actor_type == ActorType.raised_fund.value], + limit=1, + return_type="domain", + ) + if summary: + info = ( + info + + f"([{summary[0].timestamp}]共{summary[0].actor_count}家基金持股占比:{float_to_pct_str(summary[0].holding_ratio)}, 变化: {float_to_pct_str(summary[0].change_ratio)})" + ) + + summary: List[StockActorSummary] = StockActorSummary.query_data( + entity_id=stock.entity_id, + order=StockActorSummary.timestamp.desc(), + filters=[StockActorSummary.actor_type == ActorType.qfii.value], + limit=1, + return_type="domain", + ) + if summary: + info = ( + info + + f"([{summary[0].timestamp}]共{summary[0].actor_count}家qfii持股占比:{float_to_pct_str(summary[0].holding_ratio)}, 变化: {float_to_pct_str(summary[0].change_ratio)})" + ) + + infos.append(info) + return infos + + +if __name__ == "__main__": print(get_subscriber_emails()) diff --git a/examples/reports/report_bull.py b/examples/reports/report_bull.py new file mode 100644 index 00000000..3292ac24 --- /dev/null +++ b/examples/reports/report_bull.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +import logging + +from apscheduler.schedulers.background import BackgroundScheduler + +from examples.factors.tech_factor import BullAndUpFactor +from examples.report_utils import report_targets +from zvt import init_log +from zvt.api.kdata import get_latest_kdata_date +from zvt.api.selector import get_middle_and_big_stock +from zvt.contract import AdjustType +from zvt.informer import EmailInformer + +logger = logging.getLogger(__name__) + +sched = BackgroundScheduler() +email_informer = EmailInformer() + + +@sched.scheduled_job("cron", hour=18, minute=0, day_of_week="mon-fri") +def report_bull(): + target_date = get_latest_kdata_date(entity_type="stock", adjust_type=AdjustType.hfq, provider="em") + entity_ids = get_middle_and_big_stock(timestamp=target_date) + + report_targets( + factor_cls=BullAndUpFactor, + entity_provider="em", + data_provider="em", + title="bull股票", 
+ entity_type="stock", + informer=email_informer, + em_group="bull股票", + em_group_over_write=False, + filter_by_volume=False, + adjust_type=AdjustType.hfq, + start_timestamp="2019-01-01", + turnover_threshold=300000000, + turnover_rate_threshold=0.02, + entity_ids=entity_ids, + ) + report_targets( + factor_cls=BullAndUpFactor, + entity_provider="em", + data_provider="em", + title="bull板块", + entity_type="block", + informer=email_informer, + em_group="bull股票", + em_group_over_write=False, + filter_by_volume=False, + adjust_type=AdjustType.qfq, + start_timestamp="2019-01-01", + turnover_threshold=10000000000, + turnover_rate_threshold=0.02, + ) + + +if __name__ == "__main__": + init_log("report_bull.log") + + report_bull() + + sched.start() + + sched._thread.join() diff --git a/examples/reports/report_core_compay.py b/examples/reports/report_core_compay.py index 41f4998e..d76b392e 100644 --- a/examples/reports/report_core_compay.py +++ b/examples/reports/report_core_compay.py @@ -2,17 +2,17 @@ import logging import time -import eastmoneypy from apscheduler.schedulers.background import BackgroundScheduler from examples.factors.fundamental_selector import FundamentalSelector -from examples.reports import get_subscriber_emails +from examples.reports import get_subscriber_emails, stocks_with_info +from zvt import init_log, zvt_config from zvt.contract.api import get_entities -from zvt.utils.time_utils import now_pd_timestamp, to_time_str -from zvt import init_log from zvt.domain import Stock from zvt.factors.target_selector import TargetSelector +from zvt.informer.inform_utils import add_to_eastmoney from zvt.informer.informer import EmailInformer +from zvt.utils.time_utils import now_pd_timestamp, to_time_str logger = logging.getLogger(__name__) @@ -20,7 +20,7 @@ # 基本面选股 每周一次即可 基本无变化 -@sched.scheduled_job('cron', hour=16, minute=0, day_of_week='6') +@sched.scheduled_job("cron", hour=16, minute=0, day_of_week="6") def report_core_company(): while True: error_count = 0 @@ 
-34,47 +34,47 @@ def report_core_company(): target_date = to_time_str(now_pd_timestamp()) - my_selector: TargetSelector = FundamentalSelector(start_timestamp='2016-01-01', end_timestamp=target_date) + my_selector: TargetSelector = FundamentalSelector(start_timestamp="2016-01-01", end_timestamp=target_date) my_selector.run() long_targets = my_selector.get_open_long_targets(timestamp=target_date) if long_targets: - stocks = get_entities(provider='joinquant', entity_schema=Stock, entity_ids=long_targets, - return_type='domain') + stocks = get_entities( + provider="joinquant", entity_schema=Stock, entity_ids=long_targets, return_type="domain" + ) # add them to eastmoney try: - try: - eastmoneypy.del_group('core') - except: - pass - eastmoneypy.create_group('core') - for stock in stocks: - eastmoneypy.add_to_group(stock.code, group_name='core') + codes = [stock.code for stock in stocks] + add_to_eastmoney(codes=codes, entity_type="stock", group="core") except Exception as e: - email_action.send_message("5533061@qq.com", f'report_core_company error', - 'report_core_company error:{}'.format(e)) - - info = [f'{stock.name}({stock.code})' for stock in stocks] - msg = ' '.join(info) + email_action.send_message( + zvt_config["email_username"], + f"report_core_company error", + "report_core_company error:{}".format(e), + ) + + infos = stocks_with_info(stocks) + msg = "\n".join(infos) else: - msg = 'no targets' + msg = "no targets" logger.info(msg) - email_action.send_message(get_subscriber_emails(), f'{to_time_str(target_date)} 核心资产选股结果', msg) + email_action.send_message(get_subscriber_emails(), f"{to_time_str(target_date)} 核心资产选股结果", msg) break except Exception as e: - logger.exception('report_core_company error:{}'.format(e)) + logger.exception("report_core_company error:{}".format(e)) time.sleep(60 * 3) error_count = error_count + 1 if error_count == 10: - email_action.send_message("5533061@qq.com", f'report_core_company error', - 'report_core_company error:{}'.format(e)) + 
email_action.send_message( + zvt_config["email_username"], f"report_core_company error", "report_core_company error:{}".format(e) + ) -if __name__ == '__main__': - init_log('report_core_company.log') +if __name__ == "__main__": + init_log("report_core_company.log") report_core_company() diff --git a/examples/reports/report_top_stats.py b/examples/reports/report_top_stats.py deleted file mode 100644 index 0a0af152..00000000 --- a/examples/reports/report_top_stats.py +++ /dev/null @@ -1,110 +0,0 @@ -# -*- coding: utf-8 -*- -import logging - -import eastmoneypy -from apscheduler.schedulers.background import BackgroundScheduler -from tabulate import tabulate - -from zvt import init_log, zvt_config -from zvt.api import get_top_performance_entities, get_top_volume_entities -from zvt.contract.api import get_entity_ids, decode_entity_id -from zvt.domain import Stock -from zvt.domain import Stock1dHfqKdata -from zvt.informer.informer import EmailInformer -from zvt.utils.time_utils import next_date - -logger = logging.getLogger(__name__) - -sched = BackgroundScheduler() - - -@sched.scheduled_job('cron', hour=18, minute=30, day_of_week='mon-fri') -def report_top_stats(periods=[7, 30, 180, 365], ignore_new_stock=True): - latest_day: Stock1dHfqKdata = Stock1dHfqKdata.query_data(order=Stock1dHfqKdata.timestamp.desc(), limit=1, - return_type='domain') - current_timestamp = latest_day[0].timestamp - email_action = EmailInformer() - - # 至少上市一年 - filters = None - if ignore_new_stock: - pre_year = next_date(current_timestamp, -365) - - stocks = get_entity_ids(provider='joinquant', entity_schema=Stock, filters=[Stock.timestamp <= pre_year]) - filters = [Stock1dHfqKdata.entity_id.in_(stocks)] - - stats = [] - ups = [] - downs = [] - - for period in periods: - start = next_date(current_timestamp, -period) - df, _ = get_top_performance_entities(start_timestamp=start, filters=filters, pct=1, show_name=True) - df.rename(columns={'score': f'score_{period}'}, inplace=True) - 
ups.append(tabulate(df.iloc[:50], headers='keys')) - downs.append(tabulate(df.iloc[-50:], headers='keys')) - - stats.append(tabulate(df.describe(), headers='keys')) - - # 最近一个月最靓仔的 - if period == 30: - # add them to eastmoney - try: - try: - eastmoneypy.del_group('最靓仔') - except: - pass - eastmoneypy.create_group('最靓仔') - for entity_id in df.index[:50]: - _, _, code = decode_entity_id(entity_id) - eastmoneypy.add_to_group(code=code, group_name='最靓仔') - except Exception as e: - logger.exception(e) - email_action.send_message("5533061@qq.com", f'report_top_stats error', - 'report_top_stats error:{}'.format(e)) - - # 一年内没怎么动的 - if period == 365: - stable_df = df[(df['score_365'] > -0.1) & (df['score_365'] < 0.1)] - vol_df = get_top_volume_entities(entity_ids=stable_df.index.tolist(), start_timestamp=start) - - # add them to eastmoney - try: - try: - eastmoneypy.del_group('躺尸一年') - except: - pass - eastmoneypy.create_group('躺尸一年') - for entity_id in vol_df.index[:50]: - _, _, code = decode_entity_id(entity_id) - eastmoneypy.add_to_group(code=code, group_name='躺尸一年') - except Exception as e: - logger.exception(e) - email_action.send_message(zvt_config['email_username'], f'report_top_stats error', - 'report_top_stats error:{}'.format(e)) - - msg = '\n' - for s in stats: - msg = msg + s + '\n' - email_action.send_message(zvt_config['email_username'], f'{current_timestamp} 统计报告', msg) - - msg = '\n' - for up in ups: - msg = msg + up + '\n' - email_action.send_message(zvt_config['email_username'], f'{current_timestamp} 涨幅统计报告', msg) - - msg = '\n' - for down in downs: - msg = msg + down + '\n' - - email_action.send_message(zvt_config['email_username'], f'{current_timestamp} 跌幅统计报告', msg) - - -if __name__ == '__main__': - init_log('report_top_stats.log') - - report_top_stats() - - sched.start() - - sched._thread.join() diff --git a/examples/reports/report_tops.py b/examples/reports/report_tops.py new file mode 100644 index 00000000..feec5fd6 --- /dev/null +++ 
b/examples/reports/report_tops.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +import logging + +from apscheduler.schedulers.background import BackgroundScheduler + +from examples.report_utils import report_top_entities, inform +from zvt import init_log +from zvt.api.stats import TopType, get_latest_kdata_date +from zvt.contract import AdjustType +from zvt.domain import Block, BlockCategory +from zvt.factors.top_stocks import get_top_stocks +from zvt.informer import EmailInformer + +logger = logging.getLogger(__name__) + +sched = BackgroundScheduler() + +email_informer = EmailInformer() + + +@sched.scheduled_job("cron", hour=17, minute=0, day_of_week="mon-fri") +def report_top_stocks(): + # compute_top_stocks() + provider = "em" + entity_type = "stock" + target_date = get_latest_kdata_date(provider=provider, entity_type=entity_type, adjust_type=AdjustType.hfq) + selected = get_top_stocks(target_date=target_date, return_type="short") + + inform( + email_informer, + entity_ids=selected, + target_date=target_date, + title=f"stock 短期最强({len(selected)})", + entity_provider=provider, + entity_type=entity_type, + em_group="短期最强", + em_group_over_write=True, + em_group_over_write_tag=True, + ) + selected = get_top_stocks(target_date=target_date, return_type="long") + + inform( + email_informer, + entity_ids=selected, + target_date=target_date, + title=f"stock 中期最强({len(selected)})", + entity_provider=provider, + entity_type=entity_type, + em_group="中期最强", + em_group_over_write=True, + em_group_over_write_tag=False, + ) + + # report_top_entities( + # entity_type="stock", + # entity_provider="em", + # data_provider="em", + # periods=[365, 750], + # ignore_new_stock=False, + # ignore_st=True, + # adjust_type=None, + # top_count=25, + # turnover_threshold=100000000, + # turnover_rate_threshold=0.01, + # informer=email_informer, + # em_group="谁有我惨", + # em_group_over_write=True, + # return_type=TopType.negative, + # ) + + +@sched.scheduled_job("cron", hour=17, minute=30, 
day_of_week="mon-fri") +def report_top_blocks(): + df = Block.query_data(filters=[Block.category == BlockCategory.industry.value], index="entity_id") + + entity_ids = df.index.tolist() + report_top_entities( + entity_type="block", + entity_provider="em", + data_provider="em", + periods=[*range(2, 30)], + ignore_new_stock=False, + ignore_st=False, + adjust_type=None, + top_count=10, + turnover_threshold=0, + turnover_rate_threshold=0, + informer=email_informer, + em_group="最强行业", + title="最强行业", + em_group_over_write=True, + return_type=TopType.positive, + entity_ids=entity_ids, + ) + + df = Block.query_data(filters=[Block.category == BlockCategory.concept.value], index="entity_id") + df = df[~df.name.str.contains("昨日")] + entity_ids = df.index.tolist() + report_top_entities( + entity_type="block", + entity_provider="em", + data_provider="em", + periods=[*range(2, 30)], + ignore_new_stock=False, + ignore_st=False, + adjust_type=None, + top_count=10, + turnover_threshold=0, + turnover_rate_threshold=0, + informer=email_informer, + em_group="最强概念", + title="最强概念", + em_group_over_write=True, + return_type=TopType.positive, + entity_ids=entity_ids, + ) + + +@sched.scheduled_job("cron", hour=17, minute=30, day_of_week="mon-fri") +def report_top_stockhks(): + report_top_entities( + entity_type="stockhk", + entity_provider="em", + data_provider="em", + top_count=10, + periods=[*range(1, 15)], + ignore_new_stock=False, + ignore_st=False, + adjust_type=None, + turnover_threshold=30000000, + turnover_rate_threshold=0.01, + informer=email_informer, + em_group="短期最强", + title="短期最强", + em_group_over_write=False, + return_type=TopType.positive, + ) + + report_top_entities( + entity_type="stockhk", + entity_provider="em", + data_provider="em", + top_count=10, + periods=[30, 50], + ignore_new_stock=True, + ignore_st=False, + adjust_type=None, + turnover_threshold=30000000, + turnover_rate_threshold=0.01, + informer=email_informer, + em_group="中期最强", + title="中期最强", + 
em_group_over_write=False, + return_type=TopType.positive, + ) + + # report_top_entities( + # entity_type="stockhk", + # entity_provider="em", + # data_provider="em", + # top_count=20, + # periods=[365, 750], + # ignore_new_stock=True, + # ignore_st=False, + # adjust_type=None, + # turnover_threshold=50000000, + # turnover_rate_threshold=0.005, + # informer=email_informer, + # em_group="谁有我惨", + # em_group_over_write=False, + # return_type=TopType.negative, + # ) + + +if __name__ == "__main__": + init_log("report_tops.log") + + report_top_stocks() + # report_top_blocks() + report_top_stockhks() + + sched.start() + + sched._thread.join() diff --git a/examples/reports/report_vol_up.py b/examples/reports/report_vol_up.py new file mode 100644 index 00000000..e14748b1 --- /dev/null +++ b/examples/reports/report_vol_up.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +import logging + +from zvt.factors.ma import VolumeUpMaFactor +from apscheduler.schedulers.background import BackgroundScheduler + +from examples.report_utils import report_targets, inform +from zvt import init_log +from zvt.api.kdata import get_latest_kdata_date +from zvt.contract import AdjustType +from zvt.factors.top_stocks import get_top_stocks +from zvt.informer import EmailInformer + +logger = logging.getLogger(__name__) + +sched = BackgroundScheduler() + +email_informer = EmailInformer() + + +@sched.scheduled_job("cron", hour=17, minute=0, day_of_week="mon-fri") +def report_vol_up_stocks(): + provider = "em" + entity_type = "stock" + target_date = get_latest_kdata_date(provider=provider, entity_type=entity_type, adjust_type=AdjustType.hfq) + selected = get_top_stocks(target_date=target_date, return_type="small_vol_up") + + inform( + email_informer, + entity_ids=selected, + target_date=target_date, + title=f"stock 放量突破(半)年线小市值股票({len(selected)})", + entity_provider=provider, + entity_type=entity_type, + em_group="年线股票", + em_group_over_write=True, + em_group_over_write_tag=False, + ) + selected = 
get_top_stocks(target_date=target_date, return_type="big_vol_up") + + inform( + email_informer, + entity_ids=selected, + target_date=target_date, + title=f"stock 放量突破(半)年线大市值股票({len(selected)})", + entity_provider=provider, + entity_type=entity_type, + em_group="年线股票", + em_group_over_write=False, + em_group_over_write_tag=False, + ) + + +@sched.scheduled_job("cron", hour=17, minute=30, day_of_week="mon-fri") +def report_vol_up_stockhks(): + report_targets( + factor_cls=VolumeUpMaFactor, + entity_provider="em", + data_provider="em", + informer=email_informer, + em_group="年线股票", + title="放量突破(半)年线港股", + entity_type="stockhk", + em_group_over_write=False, + filter_by_volume=False, + adjust_type=AdjustType.hfq, + start_timestamp="2021-01-01", + # factor args + windows=[120, 250], + over_mode="or", + up_intervals=60, + turnover_threshold=100000000, + turnover_rate_threshold=0.01, + ) + + +if __name__ == "__main__": + init_log("report_vol_up.log") + + report_vol_up_stocks() + report_vol_up_stockhks() + sched.start() + + sched._thread.join() diff --git a/examples/reports/report_vol_up120.py b/examples/reports/report_vol_up120.py deleted file mode 100644 index 05a5fe7f..00000000 --- a/examples/reports/report_vol_up120.py +++ /dev/null @@ -1,105 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -import logging -import time - -import eastmoneypy -from apscheduler.schedulers.background import BackgroundScheduler - -from zvt import init_log -from zvt.contract.api import get_entities -from zvt.domain import Stock, StockValuation, Stock1dHfqKdata -from zvt.factors import VolumeUpMaFactor -from zvt.factors.target_selector import TargetSelector -from zvt.informer.informer import EmailInformer - -logger = logging.getLogger(__name__) - -sched = BackgroundScheduler() - - -@sched.scheduled_job('cron', hour=19, minute=30, day_of_week='mon-fri') -def report_vol_up_120(): - while True: - error_count = 0 - email_action = EmailInformer() - - try: - # 抓取k线数据 - # 
StockTradeDay.record_data(provider='joinquant') - # Stock1dKdata.record_data(provider='joinquant') - - latest_day: Stock1dHfqKdata = Stock1dHfqKdata.query_data(order=Stock1dHfqKdata.timestamp.desc(), limit=1, - return_type='domain') - target_date = latest_day[0].timestamp - - # 计算均线 - my_selector = TargetSelector(start_timestamp='2019-06-01', end_timestamp=target_date) - # add the factors - factor1 = VolumeUpMaFactor(start_timestamp='2019-06-01', end_timestamp=target_date, windows=[120]) - - my_selector.add_filter_factor(factor1) - - my_selector.run() - - long_stocks = my_selector.get_open_long_targets(timestamp=target_date) - - msg = 'no targets' - - # 过滤亏损股 - # check StockValuation data - pe_date = target_date - datetime.timedelta(10) - if StockValuation.query_data(start_timestamp=pe_date, limit=1, return_type='domain'): - positive_df = StockValuation.query_data(provider='joinquant', entity_ids=long_stocks, - start_timestamp=pe_date, - filters=[StockValuation.pe > 0], - columns=['entity_id']) - bad_stocks = set(long_stocks) - set(positive_df['entity_id'].tolist()) - if bad_stocks: - stocks = get_entities(provider='joinquant', entity_schema=Stock, entity_ids=bad_stocks, - return_type='domain') - info = [f'{stock.name}({stock.code})' for stock in stocks] - msg = '亏损股:' + ' '.join(info) + '\n' - - long_stocks = set(positive_df['entity_id'].tolist()) - - if long_stocks: - stocks = get_entities(provider='joinquant', entity_schema=Stock, entity_ids=long_stocks, - return_type='domain') - # add them to eastmoney - try: - try: - eastmoneypy.create_group('tech') - except: - pass - for stock in stocks: - eastmoneypy.add_to_group(stock.code, group_name='tech') - except Exception as e: - email_action.send_message("5533061@qq.com", f'report_vol_up_120 error', - 'report_vol_up_120 error:{}'.format(e)) - - info = [f'{stock.name}({stock.code})' for stock in stocks] - msg = msg + '盈利股:' + ' '.join(info) + '\n' - - logger.info(msg) - - email_action.send_message('5533061@qq.com', 
f'{target_date} 改进版放量突破半年线选股结果', msg) - - break - except Exception as e: - logger.exception('report_vol_up_120 error:{}'.format(e)) - time.sleep(60 * 3) - error_count = error_count + 1 - if error_count == 10: - email_action.send_message("5533061@qq.com", f'report_vol_up_120 error', - 'report_vol_up_120 error:{}'.format(e)) - - -if __name__ == '__main__': - init_log('report_vol_up_120.log') - - report_vol_up_120() - - sched.start() - - sched._thread.join() diff --git a/examples/reports/report_vol_up250.py b/examples/reports/report_vol_up250.py deleted file mode 100644 index 826f7ed6..00000000 --- a/examples/reports/report_vol_up250.py +++ /dev/null @@ -1,107 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -import logging -import time - -import eastmoneypy -from apscheduler.schedulers.background import BackgroundScheduler - -from examples.reports import get_subscriber_emails -from zvt import init_log -from zvt.contract.api import get_entities -from zvt.domain import Stock, StockValuation, Stock1dHfqKdata -from zvt.factors import VolumeUpMaFactor -from zvt.factors.target_selector import TargetSelector -from zvt.informer.informer import EmailInformer - -logger = logging.getLogger(__name__) - -sched = BackgroundScheduler() - - -@sched.scheduled_job('cron', hour=19, minute=0, day_of_week='mon-fri') -def report_vol_up_250(): - while True: - error_count = 0 - email_action = EmailInformer() - - try: - # 抓取k线数据 - # StockTradeDay.record_data(provider='joinquant') - # Stock1dKdata.record_data(provider='joinquant') - - latest_day: Stock1dHfqKdata = Stock1dHfqKdata.query_data(order=Stock1dHfqKdata.timestamp.desc(), limit=1, - return_type='domain') - target_date = latest_day[0].timestamp - - # 计算均线 - my_selector = TargetSelector(start_timestamp='2018-10-01', end_timestamp=target_date) - # add the factors - factor1 = VolumeUpMaFactor(start_timestamp='2018-10-01', end_timestamp=target_date) - - my_selector.add_filter_factor(factor1) - - my_selector.run() - - long_stocks = 
my_selector.get_open_long_targets(timestamp=target_date) - - msg = 'no targets' - - # 过滤亏损股 - # check StockValuation data - pe_date = target_date - datetime.timedelta(10) - if StockValuation.query_data(start_timestamp=pe_date, limit=1, return_type='domain'): - positive_df = StockValuation.query_data(provider='joinquant', entity_ids=long_stocks, - start_timestamp=pe_date, - filters=[StockValuation.pe > 0], - columns=['entity_id']) - bad_stocks = set(long_stocks) - set(positive_df['entity_id'].tolist()) - if bad_stocks: - stocks = get_entities(provider='joinquant', entity_schema=Stock, entity_ids=bad_stocks, - return_type='domain') - info = [f'{stock.name}({stock.code})' for stock in stocks] - msg = '亏损股:' + ' '.join(info) + '\n' - - long_stocks = set(positive_df['entity_id'].tolist()) - - if long_stocks: - stocks = get_entities(provider='joinquant', entity_schema=Stock, entity_ids=long_stocks, - return_type='domain') - # add them to eastmoney - try: - try: - eastmoneypy.del_group('tech') - except: - pass - eastmoneypy.create_group('tech') - for stock in stocks: - eastmoneypy.add_to_group(stock.code, group_name='tech') - except Exception as e: - email_action.send_message("5533061@qq.com", f'report_vol_up_250 error', - 'report_vol_up_250 error:{}'.format(e)) - - info = [f'{stock.name}({stock.code})' for stock in stocks] - msg = msg + '盈利股:' + ' '.join(info) + '\n' - - logger.info(msg) - - email_action.send_message(get_subscriber_emails(), f'{target_date} 改进版放量突破年线选股结果', msg) - - break - except Exception as e: - logger.exception('report_vol_up_250 error:{}'.format(e)) - time.sleep(60 * 3) - error_count = error_count + 1 - if error_count == 10: - email_action.send_message("5533061@qq.com", f'report_vol_up_250 error', - 'report_vol_up_250 error:{}'.format(e)) - - -if __name__ == '__main__': - init_log('report_vol_up_250.log') - - report_vol_up_250() - - sched.start() - - sched._thread.join() diff --git a/examples/reports/subscriber_emails.json 
b/examples/reports/subscriber_emails.json index 72d2c283..6eb2a780 100644 --- a/examples/reports/subscriber_emails.json +++ b/examples/reports/subscriber_emails.json @@ -1,80 +1,4 @@ [ - "443012931@qq.com", - "764567192@qq.com", - "2242535441@qq.com", - "2315983623@qq.com", - "31591084@qq.com", - "348886500@qq.com", - "359101562@qq.com", - "1985547858@qq.com", - "5533061@qq.com", - "dragonllt_usdt@163.com", - "laosiji@protonmail.com", - "bytegen@126.com", - "manstiilin@protonmail.com", - "evergreen214@163.com", - "lh200214@163.com", - "2030988@qq.com", - "victor_yang@jianxunsoft.com", - "stellar2020@qq.com", - "qimaolvdeafanti@126.com", - "121750972@qq.com", - "yeyang521@vip.qq.com", - "370634668@qq.com", - "m18201575737@163.com", - "wisfern@qq.com", - "494710584@qq.com", - "172141102@qq.com", - "6639157@qq.com", - "jiwenkangatech@foxmail.com", - "570672340@qq.com", - "zhy0216@gmail.com", - "17613804@qq.com", - "yyjsir@sohu.com", - "549155872@qq.com", - "13122260573@163.com", - "2451584@qq.com", - "123847@qq.com", - "519706968@qq.com", - "nidf@live.cn", - "1211698784@qq.com", - "519706968@qq.com ", - "3377574502@qq.com", - "279659769@qq.com", - "0102152879@163.com", - "273404452@qq.com", - "saiksy@qq.com", - "brother0952@sina.com", - "18617339965@163.com", - "495173073@qq.com", - "49754074@qq.com", - "270601413@qq.com", - "327714319@qq.com ", - "79292522@qq.com", - "1749458504@qq.com", - "1216945411@qq.com", - "763234119@qq.com", - "xiaokaxie@sina.com", - "784568830@qq.com", - "172654682@qq.com", - "h_yatu@163.com", - "361616298@qq.com", - "283387714@qq.com", - "1277503624@qq.com", - "295066056@qq.com", - "164502036@qq.com", - "2305841739@qq.com", - "1034584630@qq.com", - "522971846@qq.com", - "shaode2012@sina.cn", - "fengbo_wu@163.com", - "laura_yy@live.cn", - "magangtongxue@163.com", - "jiudada@163.com", - "934523197@qq.com", - "969152951@qq.com", - "jarodgeng@gmail.com", - "307833035@qq.com", - "651133121@qq.com", - "476562694@qq.com" + "test@qq.com", + 
"test1@qq.com" ] diff --git a/examples/requirements.txt b/examples/requirements.txt index e6d81bb5..290c539e 100644 --- a/examples/requirements.txt +++ b/examples/requirements.txt @@ -1,4 +1,4 @@ -zvt >= 0.9.0 +zvt >= 0.10.1 apscheduler >= 3.4.0 -eastmoneypy >= 0.0.6 -tabulate>=0.8.8 \ No newline at end of file +tabulate>=0.8.8 +ta \ No newline at end of file diff --git a/zvt/ui/assets/__init__.py b/examples/research/__init__.py similarity index 100% rename from zvt/ui/assets/__init__.py rename to examples/research/__init__.py diff --git a/examples/research/dragon_and_tiger.py b/examples/research/dragon_and_tiger.py new file mode 100644 index 00000000..cf25879e --- /dev/null +++ b/examples/research/dragon_and_tiger.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +from zvt.api.selector import get_big_players, get_player_success_rate +from zvt.domain import DragonAndTiger +from zvt.utils.time_utils import date_time_by_interval, current_date + +if __name__ == "__main__": + provider = "em" + DragonAndTiger.record_data(provider=provider) + end_timestamp = date_time_by_interval(current_date(), -60) + # recent year + start_timestamp = date_time_by_interval(end_timestamp, -400) + print(f"{start_timestamp} to {end_timestamp}") + players = get_big_players(start_timestamp=start_timestamp, end_timestamp=end_timestamp) + print(players) + df = get_player_success_rate( + start_timestamp=start_timestamp, end_timestamp=end_timestamp, intervals=[3, 5, 10], players=players + ) + print(df) diff --git a/examples/research/top_dragon_tiger.py b/examples/research/top_dragon_tiger.py new file mode 100644 index 00000000..0dac4a52 --- /dev/null +++ b/examples/research/top_dragon_tiger.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +from typing import Optional, Type, List, Union + +import pandas as pd + +from zvt.api.selector import get_players +from zvt.api.stats import get_top_performance_by_month +from zvt.contract import TradableEntity, IntervalLevel, AdjustType +from zvt.contract.factor 
import Transformer, Accumulator +from zvt.domain import Stock +from zvt.factors.technical_factor import TechnicalFactor +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import pre_month_start_date, date_time_by_interval + + +def top_dragon_and_tiger(data_provider="em", start_timestamp="2021-01-01", end_timestamp="2022-01-01"): + dfs = [] + for start_date, end_date, df in get_top_performance_by_month( + start_timestamp=start_timestamp, end_timestamp=end_timestamp, list_days=250, data_provider=data_provider + ): + pre_month_start = pre_month_start_date(start_date) + for entity_id in df.index[:30]: + players = get_players( + entity_id=entity_id, + start_timestamp=date_time_by_interval(start_date, 15), + end_timestamp=end_timestamp, + provider=data_provider, + direction="in", + ) + print(players) + dfs.append(players) + + player_df = pd.concat(dfs, sort=True) + return player_df.sort_index(level=[0, 1]) + + +class DragonTigerFactor(TechnicalFactor): + def __init__( + self, + entity_id: str, + entity_schema: Type[TradableEntity] = Stock, + provider: str = "em", + entity_provider: str = "em", + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + columns: List = None, + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: int = None, + transformer: Transformer = None, + accumulator: Accumulator = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = False, + adjust_type: Union[AdjustType, str] = None, + ) -> None: + super().__init__( + entity_schema, + provider, + entity_provider, + 
[entity_id], + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + transformer, + accumulator, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + adjust_type, + ) + self.player_df = get_players( + entity_id=entity_id, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + provider="em", + direction="in", + ) + + def drawer_annotation_df(self) -> Optional[pd.DataFrame]: + def order_type_flag(df): + return "
".join(df.tolist()) + + if pd_is_not_null(self.player_df): + annotation_df = self.player_df.copy() + annotation_df["value"] = self.factor_df.loc[annotation_df.index]["close"] + annotation_df["flag"] = annotation_df[["dep1", "dep2", "dep3", "dep4", "dep5"]].apply( + lambda x: order_type_flag(x), axis=1 + ) + annotation_df["color"] = "#ff7f0e" + return annotation_df + + +if __name__ == "__main__": + top_dragon_and_tiger() + # Stock1dHfqKdata.record_data(entity_id="stock_sz_002561", provider="em") + # f = DragonTigerFactor(entity_id="stock_sz_002561", provider="em") + # f.draw(show=True) diff --git a/examples/research/top_tags.py b/examples/research/top_tags.py new file mode 100644 index 00000000..8f264170 --- /dev/null +++ b/examples/research/top_tags.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +from zvt.api.stats import get_top_performance_by_month +from zvt.domain import Stock1dHfqKdata +from zvt.utils.time_utils import date_time_by_interval, month_end_date, is_same_date + + +# 每月涨幅前30,市值90%分布在100亿以下 +# 重复上榜的有1/4左右 +# 连续两个月上榜的1/10左右 +def top_tags(data_provider="em", start_timestamp="2020-01-01", end_timestamp="2021-01-01"): + records = [] + for _, timestamp, df in get_top_performance_by_month( + start_timestamp=start_timestamp, end_timestamp=end_timestamp, list_days=250, data_provider=data_provider + ): + for entity_id in df.index[:30]: + query_timestamp = timestamp + while True: + kdata = Stock1dHfqKdata.query_data( + provider=data_provider, + entity_id=entity_id, + start_timestamp=query_timestamp, + order=Stock1dHfqKdata.timestamp.asc(), + limit=1, + return_type="domain", + ) + if not kdata or kdata[0].turnover_rate == 0: + if is_same_date(query_timestamp, month_end_date(query_timestamp)): + break + query_timestamp = date_time_by_interval(query_timestamp) + continue + cap = kdata[0].turnover / kdata[0].turnover_rate + break + + records.append( + {"entity_id": entity_id, "timestamp": timestamp, "cap": cap, "score": df.loc[entity_id, "score"]} + ) + + return 
records + + +if __name__ == "__main__": + print(top_tags()) diff --git a/examples/result.json b/examples/result.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/examples/result.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/examples/stock_tags.json b/examples/stock_tags.json new file mode 100644 index 00000000..391a2578 --- /dev/null +++ b/examples/stock_tags.json @@ -0,0 +1,34982 @@ +[ + { + "code": "000972", + "name": "中基健康", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "002269", + "name": "美邦服饰", + "tag": "大消费", + "reason": "" + }, + { + "code": "002762", + "name": "金发拉比", + "tag": "大消费", + "reason": "" + }, + { + "code": "603170", + "name": "宝立食品", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "600257", + "name": "大湖股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002803", + "name": "吉宏股份", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "002186", + "name": "全 聚 德", + "tag": "大消费", + "reason": "" + }, + { + "code": "600365", + "name": "ST通葡", + "tag": "大消费", + "reason": "" + }, + { + "code": "600252", + "name": "中恒集团", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "002419", + "name": "天虹股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "600771", + "name": "广誉远", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600655", + "name": "豫园股份", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "000501", + "name": "武商集团", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "600332", + "name": "白云山", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "002183", + "name": "怡 亚 通", + "tag": "大消费", + "reason": "" + }, + { + "code": "002374", + "name": "中锐股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002342", + "name": "巨力索具", + "tag": "大消费", + "reason": "" + }, + { + "code": "600382", + "name": "广东明珠", + "tag": "大消费", + "reason": 
"" + }, + { + "code": "600696", + "name": "岩石股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "603919", + "name": "金徽酒", + "tag": "大消费", + "reason": "" + }, + { + "code": "603779", + "name": "威龙股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "603777", + "name": "来伊份", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "600059", + "name": "古越龙山", + "tag": "大消费", + "reason": "" + }, + { + "code": "000860", + "name": "顺鑫农业", + "tag": "大消费", + "reason": "" + }, + { + "code": "600467", + "name": "好当家", + "tag": "大消费", + "reason": "" + }, + { + "code": "603299", + "name": "苏盐井神", + "tag": "大消费", + "reason": "" + }, + { + "code": "000995", + "name": "*ST皇台", + "tag": "大消费", + "reason": "" + }, + { + "code": "002646", + "name": "天佑德酒", + "tag": "大消费", + "reason": "" + }, + { + "code": "000596", + "name": "古井贡酒", + "tag": "大消费", + "reason": "" + }, + { + "code": "600559", + "name": "老白干酒", + "tag": "大消费", + "reason": "" + }, + { + "code": "600702", + "name": "舍得酒业", + "tag": "大消费", + "reason": "" + }, + { + "code": "601579", + "name": "会稽山", + "tag": "大消费", + "reason": "" + }, + { + "code": "600153", + "name": "建发股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "603589", + "name": "口子窖", + "tag": "大消费", + "reason": "" + }, + { + "code": "600809", + "name": "山西汾酒", + "tag": "大消费", + "reason": "" + }, + { + "code": "600197", + "name": "伊力特", + "tag": "大消费", + "reason": "" + }, + { + "code": "600238", + "name": "海南椰岛", + "tag": "大消费", + "reason": "" + }, + { + "code": "000568", + "name": "泸州老窖", + "tag": "大消费", + "reason": "" + }, + { + "code": "603198", + "name": "迎驾贡酒", + "tag": "大消费", + "reason": "" + }, + { + "code": "600199", + "name": "金种子酒", + "tag": "大消费", + "reason": "" + }, + { + "code": "000930", + "name": "中粮科技", + "tag": "大消费", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "600779", + "name": "水井坊", + "tag": "大消费", + "reason": "" + }, + { + "code": "600519", + "name": "贵州茅台", + "tag": "大消费", + "reason": "" + }, + 
{ + "code": "600381", + "name": "青海春天", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "603369", + "name": "今世缘", + "tag": "大消费", + "reason": "" + }, + { + "code": "000858", + "name": "五 粮 液", + "tag": "大消费", + "reason": "" + }, + { + "code": "000799", + "name": "酒鬼酒", + "tag": "大消费", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "002304", + "name": "洋河股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "600679", + "name": "上海凤凰", + "tag": "大消费", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "603983", + "name": "丸美股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "301187", + "name": "欧圣电气", + "tag": "大消费", + "reason": "" + }, + { + "code": "600493", + "name": "凤竹纺织", + "tag": "大消费", + "reason": "" + }, + { + "code": "605136", + "name": "丽人丽妆", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "600448", + "name": "华纺股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "600439", + "name": "瑞贝卡", + "tag": "大消费", + "reason": "" + }, + { + "code": "000521", + "name": "长虹美菱", + "tag": "大消费", + "reason": "" + }, + { + "code": "603499", + "name": "翔港科技", + "tag": "大消费", + "reason": "" + }, + { + "code": "301193", + "name": "家联科技", + "tag": "大消费", + "reason": "" + }, + { + "code": "600935", + "name": "华塑股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "301376", + "name": "致欧科技", + "tag": "大消费", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "603863", + "name": "松炀资源", + "tag": "大消费", + "reason": "" + }, + { + "code": "002615", + "name": "哈尔斯", + "tag": "大消费", + "reason": "" + }, + { + "code": "000850", + "name": "华茂股份", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "001209", + "name": "洪兴股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "001216", + "name": "华瓷股份", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "001368", + "name": "通达创智", + "tag": "大消费", + "reason": "" + }, + { + "code": "001238", + "name": "浙江正特", + "tag": 
"大消费", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "000025", + "name": "特力A", + "tag": "大消费", + "reason": "" + }, + { + "code": "603661", + "name": "恒林股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002059", + "name": "云南旅游", + "tag": "大消费", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "001387", + "name": "雪祺电气", + "tag": "大消费", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "603600", + "name": "永艺股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "600156", + "name": "华升股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "601113", + "name": "华鼎股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002731", + "name": "萃华珠宝", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "600400", + "name": "红豆股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "605180", + "name": "华生科技", + "tag": "大消费", + "reason": "" + }, + { + "code": "000592", + "name": "平潭发展", + "tag": "大消费", + "reason": "" + }, + { + "code": "002357", + "name": "富临运业", + "tag": "大消费", + "reason": "" + }, + { + "code": "603958", + "name": "哈森股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002969", + "name": "嘉美包装", + "tag": "大消费", + "reason": "" + }, + { + "code": "000428", + "name": "华天酒店", + "tag": "大消费", + "reason": "" + }, + { + "code": "600882", + "name": "妙可蓝多", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "600697", + "name": "欧亚集团", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "300740", + "name": "水羊股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "000888", + "name": "峨眉山A", + "tag": "大消费", + "reason": "" + }, + { + "code": "002356", + "name": "赫美集团", + "tag": "大消费", + "reason": "" + }, + { + "code": "000610", + "name": "西安旅游", + "tag": "大消费", + "reason": "" + }, + { + "code": "605188", + "name": "国光连锁", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "000861", + "name": "海印股份", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + 
{ + "code": "603199", + "name": "九华旅游", + "tag": "大消费", + "reason": "" + }, + { + "code": "600706", + "name": "曲江文旅", + "tag": "大消费", + "reason": "" + }, + { + "code": "600824", + "name": "益民集团", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "002820", + "name": "桂发祥", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "603719", + "name": "良品铺子", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "003000", + "name": "劲仔食品", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "600828", + "name": "茂业商业", + "tag": "大消费", + "reason": "" + }, + { + "code": "002561", + "name": "徐家汇", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "600280", + "name": "中央商场", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "000679", + "name": "大连友谊", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "300783", + "name": "三只松鼠", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "000017", + "name": "深中华A", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "000523", + "name": "红棉股份", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "603518", + "name": "锦泓集团", + "tag": "大消费", + "reason": "" + }, + { + "code": "603877", + "name": "太平鸟", + "tag": "大消费", + "reason": "" + }, + { + "code": "601086", + "name": "国芳集团", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "603231", + "name": "索宝蛋白", + "tag": "大消费", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "002719", + "name": "麦趣尔", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "000558", + "name": "莱茵体育", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "600628", + "name": "新世界", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "600630", + "name": "龙头股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "600250", + "name": 
"南京商旅", + "tag": "大消费", + "reason": "" + }, + { + "code": "605080", + "name": "浙江自然", + "tag": "大消费", + "reason": "" + }, + { + "code": "603099", + "name": "长白山", + "tag": "大消费", + "reason": "" + }, + { + "code": "002033", + "name": "丽江股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "600593", + "name": "大连圣亚", + "tag": "大消费", + "reason": "" + }, + { + "code": "002780", + "name": "三夫户外", + "tag": "大消费", + "reason": "" + }, + { + "code": "000978", + "name": "桂林旅游", + "tag": "大消费", + "reason": "" + }, + { + "code": "688363", + "name": "华熙生物", + "tag": "大消费", + "reason": "" + }, + { + "code": "002612", + "name": "朗姿股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "300896", + "name": "爱美客", + "tag": "大消费", + "reason": "" + }, + { + "code": "000715", + "name": "中兴商业", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "603536", + "name": "惠发食品", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "002495", + "name": "佳隆股份", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "601566", + "name": "九牧王", + "tag": "大消费", + "reason": "" + }, + { + "code": "603711", + "name": "香飘飘", + "tag": "大消费", + "reason": "", + "hidden_tag": null + }, + { + "code": "688577", + "name": "浙海德曼", + "tag": "智能机器", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "003025", + "name": "思进智能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603289", + "name": "泰瑞机器", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002708", + "name": "光洋股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300276", + "name": "三丰智能", + "tag": "智能机器", + "reason": "", + "hidden_tag": null + }, + { + "code": "603211", + "name": "晋拓股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603095", + "name": "越剑智能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603015", + "name": "弘讯科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": null + }, + { + "code": "301083", + "name": "百胜智能", + "tag": "智能机器", + "reason": "" + }, + 
{ + "code": "002931", + "name": "锋龙股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002611", + "name": "东方精工", + "tag": "智能机器", + "reason": "", + "hidden_tag": null + }, + { + "code": "002527", + "name": "新时达", + "tag": "智能机器", + "reason": "", + "hidden_tag": null + }, + { + "code": "002523", + "name": "天桥起重", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "603767", + "name": "中马传动", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002031", + "name": "巨轮智能", + "tag": "智能机器", + "reason": "", + "hidden_tag": null + }, + { + "code": "002421", + "name": "达实智能", + "tag": "智能机器", + "reason": "", + "hidden_tag": null + }, + { + "code": "002337", + "name": "赛象科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": null + }, + { + "code": "002347", + "name": "泰尔股份", + "tag": "智能机器", + "reason": "", + "hidden_tag": null + }, + { + "code": "002403", + "name": "爱仕达", + "tag": "智能机器", + "reason": "", + "hidden_tag": null + }, + { + "code": "603990", + "name": "麦迪科技", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "603810", + "name": "丰山集团", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "601886", + "name": "江河集团", + "tag": "赛道", + "reason": "" + }, + { + "code": "002897", + "name": "意华股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "002805", + "name": "丰元股份", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "603959", + "name": "百利科技", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "002333", + "name": "罗普斯金", + "tag": "赛道", + "reason": "" + }, + { + "code": "003027", + "name": "同兴环保", + "tag": "赛道", + "reason": "" + }, + { + "code": "002129", + "name": "TCL中环", + "tag": "赛道", + "reason": "" + }, + { + "code": "002459", + "name": "晶澳科技", + "tag": "赛道", + "reason": "" + }, + { + "code": "600732", + "name": "爱旭股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "605366", + "name": "宏柏新材", + "tag": "赛道", + "reason": "" + }, + { + "code": "000635", 
+ "name": "英力特", + "tag": "赛道", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "603026", + "name": "胜华新材", + "tag": "赛道", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "605399", + "name": "晨光新材", + "tag": "赛道", + "reason": "" + }, + { + "code": "002211", + "name": "宏达新材", + "tag": "赛道", + "reason": "" + }, + { + "code": "600207", + "name": "安彩高科", + "tag": "赛道", + "reason": "" + }, + { + "code": "002487", + "name": "大金重工", + "tag": "赛道", + "reason": "" + }, + { + "code": "001239", + "name": "永达股份", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603312", + "name": "西典新能", + "tag": "赛道", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "688503", + "name": "聚和材料", + "tag": "赛道", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "002868", + "name": "绿康生化", + "tag": "赛道", + "reason": "" + }, + { + "code": "600819", + "name": "耀皮玻璃", + "tag": "赛道", + "reason": "" + }, + { + "code": "603398", + "name": "沐邦高科", + "tag": "赛道", + "reason": "" + }, + { + "code": "002466", + "name": "天齐锂业", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "603051", + "name": "鹿山新材", + "tag": "赛道", + "reason": "" + }, + { + "code": "688408", + "name": "中信博", + "tag": "赛道", + "reason": "" + }, + { + "code": "600499", + "name": "科达制造", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "688472", + "name": "阿特斯", + "tag": "赛道", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "001212", + "name": "中旗新材", + "tag": "赛道", + "reason": "" + }, + { + "code": "605389", + "name": "长龄液压", + "tag": "赛道", + "reason": "" + }, + { + "code": "002176", + "name": "江特电机", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "000546", + "name": "金圆股份", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "301152", + "name": "天力锂能", + "tag": "赛道", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "002132", + "name": "恒星科技", + "tag": "赛道", + "reason": "" + }, + { + 
"code": "603276", + "name": "恒兴新材", + "tag": "赛道", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "002738", + "name": "中矿资源", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "688063", + "name": "派能科技", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "002641", + "name": "公元股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "300769", + "name": "德方纳米", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "002192", + "name": "融捷股份", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "000657", + "name": "中钨高新", + "tag": "赛道", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "002667", + "name": "威领股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "000982", + "name": "中银绒业", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "002326", + "name": "永太科技", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "002665", + "name": "首航高科", + "tag": "赛道", + "reason": "" + }, + { + "code": "000695", + "name": "滨海能源", + "tag": "赛道", + "reason": "" + }, + { + "code": "600586", + "name": "金晶科技", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "603097", + "name": "江苏华辰", + "tag": "赛道", + "reason": "" + }, + { + "code": "002012", + "name": "凯恩股份", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "000762", + "name": "西藏矿业", + "tag": "赛道", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "300125", + "name": "聆达股份", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "605378", + "name": "野马电池", + "tag": "赛道", + "reason": "" + }, + { + "code": "605158", + "name": "华达新材", + "tag": "赛道", + "reason": "" + }, + { + "code": "002756", + "name": "永兴材料", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "603200", + "name": "上海洗霸", + "tag": "赛道", + "reason": "" + }, + { + "code": "603663", + "name": "三祥新材", + "tag": "赛道", + "reason": "" + }, + { + "code": "000155", 
+ "name": "川能动力", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "300473", + "name": "德尔股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "301511", + "name": "德福科技", + "tag": "赛道", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "300530", + "name": "领湃科技", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "002882", + "name": "金龙羽", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "002341", + "name": "新纶新材", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "301238", + "name": "瑞泰新材", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "002610", + "name": "爱康科技", + "tag": "赛道", + "reason": "" + }, + { + "code": "688717", + "name": "艾罗能源", + "tag": "赛道", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "600615", + "name": "丰华股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "300842", + "name": "帝科股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "600805", + "name": "悦达投资", + "tag": "赛道", + "reason": "" + }, + { + "code": "688032", + "name": "禾迈股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "300751", + "name": "迈为股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "002660", + "name": "茂硕电源", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "002323", + "name": "雅博股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "301168", + "name": "通灵股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "300093", + "name": "金刚光伏", + "tag": "赛道", + "reason": "" + }, + { + "code": "601096", + "name": "宏盛华源", + "tag": "赛道", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "000821", + "name": "京山轻机", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "300763", + "name": "锦浪科技", + "tag": "赛道", + "reason": "" + }, + { + "code": "002329", + "name": "皇氏集团", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "003022", + "name": "联泓新科", + "tag": "赛道", + "reason": "", + 
"hidden_tag": null + }, + { + "code": "001269", + "name": "欧晶科技", + "tag": "赛道", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "688390", + "name": "固德威", + "tag": "赛道", + "reason": "" + }, + { + "code": "603628", + "name": "清源股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "605117", + "name": "德业股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "002306", + "name": "中科云网", + "tag": "赛道", + "reason": "" + }, + { + "code": "002578", + "name": "闽发铝业", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "002783", + "name": "凯龙股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "603333", + "name": "尚纬股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "603577", + "name": "汇金通", + "tag": "赛道", + "reason": "" + }, + { + "code": "002529", + "name": "海源复材", + "tag": "赛道", + "reason": "" + }, + { + "code": "002634", + "name": "棒杰股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "603396", + "name": "金辰股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "301266", + "name": "宇邦新材", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "603212", + "name": "赛伍技术", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "301278", + "name": "快可电子", + "tag": "赛道", + "reason": "" + }, + { + "code": "603739", + "name": "蔚蓝生物", + "tag": "医药", + "reason": "" + }, + { + "code": "001366", + "name": "播恩集团", + "tag": "医药", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "000078", + "name": "海王生物", + "tag": "医药", + "reason": "", + "hidden_tag": null + }, + { + "code": "301526", + "name": "国际复材", + "tag": "PEEK材料", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "301076", + "name": "新瀚新材", + "tag": "PEEK材料", + "reason": "" + }, + { + "code": "002915", + "name": "中欣氟材", + "tag": "PEEK材料", + "reason": "" + }, + { + "code": "603392", + "name": "万泰生物", + "tag": "医药", + "reason": "", + "hidden_tag": null + }, + { + "code": "001300", + "name": "三柏硕", + "tag": "大消费", + "reason": "", + "hidden_tag": 
"次新股" + }, + { + "code": "600678", + "name": "四川金顶", + "tag": "房地产", + "reason": "" + }, + { + "code": "002855", + "name": "捷荣技术", + "tag": "消费电子", + "reason": "", + "hidden_tag": null + }, + { + "code": "002888", + "name": "惠威科技", + "tag": "消费电子", + "reason": "" + }, + { + "code": "002981", + "name": "朝阳科技", + "tag": "消费电子", + "reason": "", + "hidden_tag": null + }, + { + "code": "002577", + "name": "雷柏科技", + "tag": "消费电子", + "reason": "", + "hidden_tag": null + }, + { + "code": "002786", + "name": "银宝山新", + "tag": "消费电子", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "300100", + "name": "双林股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "002703", + "name": "浙江世宝", + "tag": "汽车", + "reason": "" + }, + { + "code": "603529", + "name": "爱玛科技", + "tag": "汽车", + "reason": "", + "hidden_tag": null + }, + { + "code": "603390", + "name": "通达电气", + "tag": "汽车", + "reason": "" + }, + { + "code": "603107", + "name": "上海汽配", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "603040", + "name": "新坐标", + "tag": "汽车", + "reason": "" + }, + { + "code": "600480", + "name": "凌云股份", + "tag": "汽车", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "002976", + "name": "瑞玛精密", + "tag": "汽车", + "reason": "", + "hidden_tag": null + }, + { + "code": "601975", + "name": "招商南油", + "tag": "公用", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601022", + "name": "宁波远洋", + "tag": "公用", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "600178", + "name": "东安动力", + "tag": "汽车", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "603266", + "name": "天龙股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "300492", + "name": "华图山鼎", + "tag": "教育", + "reason": "" + }, + { + "code": "605020", + "name": "永和股份", + "tag": "化工", + "reason": "" + }, + { + "code": "000691", + "name": "亚太实业", + "tag": "化工", + "reason": "" + }, + { + "code": "300505", + "name": "川金诺", + "tag": "化工", + "reason": "", + "hidden_tag": null + }, + { + "code": 
"002442", + "name": "龙星化工", + "tag": "化工", + "reason": "" + }, + { + "code": "000422", + "name": "湖北宜化", + "tag": "化工", + "reason": "", + "hidden_tag": null + }, + { + "code": "600610", + "name": "中毅达", + "tag": "化工", + "reason": "", + "hidden_tag": null + }, + { + "code": "000985", + "name": "大庆华科", + "tag": "化工", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "301000", + "name": "肇民科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "003007", + "name": "直真科技", + "tag": "AI", + "reason": "", + "hidden_tag": "" + }, + { + "code": "300620", + "name": "光库科技", + "tag": "AI", + "reason": "", + "hidden_tag": "" + }, + { + "code": "603825", + "name": "华扬联众", + "tag": "AI", + "reason": "", + "hidden_tag": null + }, + { + "code": "300502", + "name": "新易盛", + "tag": "AI", + "reason": "", + "hidden_tag": null + }, + { + "code": "300418", + "name": "昆仑万维", + "tag": "AI", + "reason": "", + "hidden_tag": null + }, + { + "code": "300394", + "name": "天孚通信", + "tag": "AI", + "reason": "", + "hidden_tag": null + }, + { + "code": "000070", + "name": "特发信息", + "tag": "AI", + "reason": "", + "hidden_tag": "" + }, + { + "code": "002230", + "name": "科大讯飞", + "tag": "AI", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600186", + "name": "莲花健康", + "tag": "AI", + "reason": "", + "hidden_tag": null + }, + { + "code": "300949", + "name": "奥雅股份", + "tag": "AI", + "reason": "", + "hidden_tag": null + }, + { + "code": "002771", + "name": "真视通", + "tag": "AI", + "reason": "", + "hidden_tag": null + }, + { + "code": "600449", + "name": "宁夏建材", + "tag": "AI", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "000628", + "name": "高新发展", + "tag": "AI", + "reason": "", + "hidden_tag": null + }, + { + "code": "688685", + "name": "迈信林", + "tag": "AI", + "reason": "" + }, + { + "code": "000936", + "name": "华西股份", + "tag": "AI", + "reason": "" + }, + { + "code": "603083", + "name": "剑桥科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300570", + "name": "太辰光", + "tag": 
"AI", + "reason": "" + }, + { + "code": "301205", + "name": "联特科技", + "tag": "AI", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "603206", + "name": "嘉环科技", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "300941", + "name": "创识科技", + "tag": "鸿蒙", + "reason": "" + }, + { + "code": "605365", + "name": "立达信", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "603038", + "name": "华立股份", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "000676", + "name": "智度股份", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "300531", + "name": "优博讯", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "002197", + "name": "证通电子", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "000158", + "name": "常山北明", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "003029", + "name": "吉大正元", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "300456", + "name": "赛微电子", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "002178", + "name": "延华智能", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "301337", + "name": "亚华电子", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "002819", + "name": "东方中科", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "003032", + "name": "传智教育", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "688609", + "name": "九联科技", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "301236", + "name": "软通动力", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "001339", + "name": "智微智能", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "300663", + "name": "科蓝软件", + "tag": "鸿蒙", + "reason": "", + "hidden_tag": null + }, + { + "code": "002181", + "name": "粤传媒", + "tag": "传媒", + "reason": "" + }, + { + "code": "600892", + "name": 
"大晟文化", + "tag": "传媒", + "reason": "" + }, + { + "code": "603721", + "name": "中广天择", + "tag": "传媒", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "603230", + "name": "内蒙新华", + "tag": "传媒", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600825", + "name": "新华传媒", + "tag": "传媒", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000793", + "name": "华闻集团", + "tag": "传媒", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600088", + "name": "中视传媒", + "tag": "传媒", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "300781", + "name": "因赛集团", + "tag": "传媒", + "reason": "", + "hidden_tag": null + }, + { + "code": "603598", + "name": "引力传媒", + "tag": "传媒", + "reason": "", + "hidden_tag": null + }, + { + "code": "605577", + "name": "龙版传媒", + "tag": "传媒", + "reason": "", + "hidden_tag": null + }, + { + "code": "603729", + "name": "龙韵股份", + "tag": "传媒", + "reason": "", + "hidden_tag": null + }, + { + "code": "603608", + "name": "天创时尚", + "tag": "传媒", + "reason": "互联网营销" + }, + { + "code": "601595", + "name": "上海电影", + "tag": "传媒", + "reason": "互联网营销", + "hidden_tag": null + }, + { + "code": "300364", + "name": "中文在线", + "tag": "传媒", + "reason": "短剧", + "hidden_tag": null + }, + { + "code": "002103", + "name": "广博股份", + "tag": "传媒", + "reason": "" + }, + { + "code": "301302", + "name": "华如科技", + "tag": "VR", + "reason": "" + }, + { + "code": "300081", + "name": "恒信东方", + "tag": "VR", + "reason": "", + "hidden_tag": null + }, + { + "code": "300968", + "name": "格林精密", + "tag": "VR", + "reason": "", + "hidden_tag": null + }, + { + "code": "300076", + "name": "GQY视讯", + "tag": "VR", + "reason": "", + "hidden_tag": null + }, + { + "code": "002635", + "name": "安洁科技", + "tag": "VR", + "reason": "", + "hidden_tag": null + }, + { + "code": "002587", + "name": "奥拓电子", + "tag": "VR", + "reason": "" + }, + { + "code": "300691", + "name": "联合光电", + "tag": "VR", + "reason": "", + "hidden_tag": null + }, + { + "code": "300793", + "name": "佳禾智能", + "tag": 
"VR", + "reason": "", + "hidden_tag": null + }, + { + "code": "000810", + "name": "创维数字", + "tag": "VR", + "reason": "" + }, + { + "code": "002189", + "name": "中光学", + "tag": "VR", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "002975", + "name": "博杰股份", + "tag": "VR", + "reason": "" + }, + { + "code": "603052", + "name": "可川科技", + "tag": "VR", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "002241", + "name": "歌尔股份", + "tag": "VR", + "reason": "", + "hidden_tag": null + }, + { + "code": "001314", + "name": "亿道信息", + "tag": "VR", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "002395", + "name": "双象股份", + "tag": "VR", + "reason": "", + "hidden_tag": null + }, + { + "code": "300556", + "name": "丝路视觉", + "tag": "VR", + "reason": "" + }, + { + "code": "002952", + "name": "亚世光电", + "tag": "VR", + "reason": "" + }, + { + "code": "300232", + "name": "洲明科技", + "tag": "VR", + "reason": "" + }, + { + "code": "300323", + "name": "华灿光电", + "tag": "VR", + "reason": "" + }, + { + "code": "301383", + "name": "天键股份", + "tag": "VR", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "002655", + "name": "共达电声", + "tag": "VR", + "reason": "", + "hidden_tag": null + }, + { + "code": "688496", + "name": "清越科技", + "tag": "VR", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "605218", + "name": "伟时电子", + "tag": "VR", + "reason": "" + }, + { + "code": "605178", + "name": "时空科技", + "tag": "VR", + "reason": "" + }, + { + "code": "605118", + "name": "力鼎光电", + "tag": "VR", + "reason": "" + }, + { + "code": "603722", + "name": "阿科力", + "tag": "VR", + "reason": "" + }, + { + "code": "300269", + "name": "联建光电", + "tag": "VR", + "reason": "" + }, + { + "code": "300264", + "name": "佳创视讯", + "tag": "VR", + "reason": "" + }, + { + "code": "603466", + "name": "风语筑", + "tag": "VR", + "reason": "", + "hidden_tag": null + }, + { + "code": "002876", + "name": "三利谱", + "tag": "VR", + "reason": "" + }, + { + "code": "002962", + "name": "五方光电", + "tag": "VR", + 
"reason": "" + }, + { + "code": "300812", + "name": "易天股份", + "tag": "VR", + "reason": "", + "hidden_tag": null + }, + { + "code": "000016", + "name": "深康佳A", + "tag": "VR", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "002449", + "name": "国星光电", + "tag": "VR", + "reason": "" + }, + { + "code": "002517", + "name": "恺英网络", + "tag": "VR", + "reason": "" + }, + { + "code": "002632", + "name": "道明光学", + "tag": "VR", + "reason": "", + "hidden_tag": null + }, + { + "code": "301011", + "name": "华立科技", + "tag": "VR", + "reason": "" + }, + { + "code": "600071", + "name": "凤凰光学", + "tag": "VR", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "003015", + "name": "日久光电", + "tag": "VR", + "reason": "" + }, + { + "code": "603373", + "name": "安邦护卫", + "tag": "安防", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "603021", + "name": "山东华鹏", + "tag": "安防", + "reason": "" + }, + { + "code": "600506", + "name": "统一股份", + "tag": "AI", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600689", + "name": "上海三毛", + "tag": "外销", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600228", + "name": "返利科技", + "tag": "外销", + "reason": "" + }, + { + "code": "003003", + "name": "天元股份", + "tag": "外销", + "reason": "" + }, + { + "code": "001379", + "name": "腾达科技", + "tag": "外销", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "300729", + "name": "乐歌股份", + "tag": "外销", + "reason": "" + }, + { + "code": "603280", + "name": "南方路机", + "tag": "外销", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "301367", + "name": "怡和嘉业", + "tag": "外销", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "002423", + "name": "中粮资本", + "tag": "金融", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "600830", + "name": "香溢融通", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "603093", + "name": "南华期货", + "tag": "金融", + "reason": "" + }, + { + "code": "600705", + "name": "中航产融", + "tag": "金融", + "reason": "", + "hidden_tag": "中字头" 
+ }, + { + "code": "600643", + "name": "爱建集团", + "tag": "金融", + "reason": "" + }, + { + "code": "000617", + "name": "中油资本", + "tag": "金融", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "600390", + "name": "五矿资本", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "001236", + "name": "弘业期货", + "tag": "金融", + "reason": "" + }, + { + "code": "600446", + "name": "金证股份", + "tag": "金融", + "reason": "" + }, + { + "code": "301315", + "name": "威士顿", + "tag": "金融", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "601336", + "name": "新华保险", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601601", + "name": "中国太保", + "tag": "金融", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "601319", + "name": "中国人保", + "tag": "金融", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "000627", + "name": "天茂集团", + "tag": "金融", + "reason": "" + }, + { + "code": "601628", + "name": "中国人寿", + "tag": "金融", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "601318", + "name": "中国平安", + "tag": "金融", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "601136", + "name": "首创证券", + "tag": "金融", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "601456", + "name": "国联证券", + "tag": "金融", + "reason": "" + }, + { + "code": "601059", + "name": "信达证券", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601555", + "name": "东吴证券", + "tag": "金融", + "reason": "" + }, + { + "code": "601099", + "name": "太平洋", + "tag": "金融", + "reason": "" + }, + { + "code": "002945", + "name": "华林证券", + "tag": "金融", + "reason": "" + }, + { + "code": "600999", + "name": "招商证券", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600906", + "name": "财达证券", + "tag": "金融", + "reason": "" + }, + { + "code": "600837", + "name": "海通证券", + "tag": "金融", + "reason": "" + }, + { + "code": "600095", + "name": "湘财股份", + "tag": "金融", + "reason": "" + }, + { + "code": "601688", + "name": "华泰证券", + "tag": 
"金融", + "reason": "" + }, + { + "code": "002939", + "name": "长城证券", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600061", + "name": "国投资本", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601211", + "name": "国泰君安", + "tag": "金融", + "reason": "" + }, + { + "code": "000783", + "name": "长江证券", + "tag": "金融", + "reason": "" + }, + { + "code": "601990", + "name": "南京证券", + "tag": "金融", + "reason": "" + }, + { + "code": "601066", + "name": "中信建投", + "tag": "金融", + "reason": "" + }, + { + "code": "002500", + "name": "山西证券", + "tag": "金融", + "reason": "" + }, + { + "code": "000776", + "name": "广发证券", + "tag": "金融", + "reason": "" + }, + { + "code": "000750", + "name": "国海证券", + "tag": "金融", + "reason": "" + }, + { + "code": "601377", + "name": "兴业证券", + "tag": "金融", + "reason": "" + }, + { + "code": "600958", + "name": "东方证券", + "tag": "金融", + "reason": "" + }, + { + "code": "601878", + "name": "浙商证券", + "tag": "金融", + "reason": "" + }, + { + "code": "002797", + "name": "第一创业", + "tag": "金融", + "reason": "" + }, + { + "code": "002736", + "name": "国信证券", + "tag": "金融", + "reason": "" + }, + { + "code": "000686", + "name": "东北证券", + "tag": "金融", + "reason": "" + }, + { + "code": "600918", + "name": "中泰证券", + "tag": "金融", + "reason": "" + }, + { + "code": "601108", + "name": "财通证券", + "tag": "金融", + "reason": "" + }, + { + "code": "000728", + "name": "国元证券", + "tag": "金融", + "reason": "" + }, + { + "code": "601901", + "name": "方正证券", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600909", + "name": "华安证券", + "tag": "金融", + "reason": "" + }, + { + "code": "600030", + "name": "中信证券", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601162", + "name": "天风证券", + "tag": "金融", + "reason": "" + }, + { + "code": "002673", + "name": "西部证券", + "tag": "金融", + "reason": "" + }, + { + "code": "600109", + "name": "国金证券", + "tag": "金融", + "reason": "" + }, + { + "code": "002926", + "name": 
"华西证券", + "tag": "金融", + "reason": "" + }, + { + "code": "600369", + "name": "西南证券", + "tag": "金融", + "reason": "" + }, + { + "code": "600864", + "name": "哈投股份", + "tag": "金融", + "reason": "" + }, + { + "code": "600621", + "name": "华鑫股份", + "tag": "金融", + "reason": "" + }, + { + "code": "600155", + "name": "华创云信", + "tag": "金融", + "reason": "" + }, + { + "code": "002670", + "name": "国盛金控", + "tag": "金融", + "reason": "" + }, + { + "code": "000166", + "name": "申万宏源", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601198", + "name": "东兴证券", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601696", + "name": "中银证券", + "tag": "金融", + "reason": "" + }, + { + "code": "601788", + "name": "光大证券", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601881", + "name": "中国银河", + "tag": "金融", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "601375", + "name": "中原证券", + "tag": "金融", + "reason": "" + }, + { + "code": "601995", + "name": "中金公司", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601236", + "name": "红塔证券", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "000712", + "name": "锦龙股份", + "tag": "金融", + "reason": "" + }, + { + "code": "600919", + "name": "江苏银行", + "tag": "金融", + "reason": "" + }, + { + "code": "601860", + "name": "紫金银行", + "tag": "金融", + "reason": "" + }, + { + "code": "600015", + "name": "华夏银行", + "tag": "金融", + "reason": "" + }, + { + "code": "002839", + "name": "张家港行", + "tag": "金融", + "reason": "" + }, + { + "code": "600908", + "name": "无锡银行", + "tag": "金融", + "reason": "" + }, + { + "code": "601818", + "name": "光大银行", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601128", + "name": "常熟银行", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601997", + "name": "贵阳银行", + "tag": "金融", + "reason": "" + }, + { + "code": "601187", + "name": "厦门银行", + "tag": "金融", + "reason": "" + }, + { + 
"code": "601665", + "name": "齐鲁银行", + "tag": "金融", + "reason": "" + }, + { + "code": "601939", + "name": "建设银行", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601166", + "name": "兴业银行", + "tag": "金融", + "reason": "" + }, + { + "code": "601528", + "name": "瑞丰银行", + "tag": "金融", + "reason": "" + }, + { + "code": "601169", + "name": "北京银行", + "tag": "金融", + "reason": "" + }, + { + "code": "002142", + "name": "宁波银行", + "tag": "金融", + "reason": "" + }, + { + "code": "601229", + "name": "上海银行", + "tag": "金融", + "reason": "" + }, + { + "code": "002807", + "name": "江阴银行", + "tag": "金融", + "reason": "" + }, + { + "code": "600928", + "name": "西安银行", + "tag": "金融", + "reason": "" + }, + { + "code": "601398", + "name": "工商银行", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "603323", + "name": "苏农银行", + "tag": "金融", + "reason": "" + }, + { + "code": "601288", + "name": "农业银行", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601658", + "name": "邮储银行", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601577", + "name": "长沙银行", + "tag": "金融", + "reason": "" + }, + { + "code": "601328", + "name": "交通银行", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600926", + "name": "杭州银行", + "tag": "金融", + "reason": "" + }, + { + "code": "002936", + "name": "郑州银行", + "tag": "金融", + "reason": "" + }, + { + "code": "002958", + "name": "青农商行", + "tag": "金融", + "reason": "" + }, + { + "code": "601825", + "name": "沪农商行", + "tag": "金融", + "reason": "" + }, + { + "code": "601009", + "name": "南京银行", + "tag": "金融", + "reason": "" + }, + { + "code": "002948", + "name": "青岛银行", + "tag": "金融", + "reason": "" + }, + { + "code": "601916", + "name": "浙商银行", + "tag": "金融", + "reason": "" + }, + { + "code": "601077", + "name": "渝农商行", + "tag": "金融", + "reason": "" + }, + { + "code": "601988", + "name": "中国银行", + "tag": "金融", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "601838", 
+ "name": "成都银行", + "tag": "金融", + "reason": "" + }, + { + "code": "001227", + "name": "兰州银行", + "tag": "金融", + "reason": "" + }, + { + "code": "601998", + "name": "中信银行", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600000", + "name": "浦发银行", + "tag": "金融", + "reason": "" + }, + { + "code": "002966", + "name": "苏州银行", + "tag": "金融", + "reason": "" + }, + { + "code": "000001", + "name": "平安银行", + "tag": "金融", + "reason": "" + }, + { + "code": "600016", + "name": "民生银行", + "tag": "金融", + "reason": "" + }, + { + "code": "600036", + "name": "招商银行", + "tag": "金融", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "601963", + "name": "重庆银行", + "tag": "金融", + "reason": "" + }, + { + "code": "600816", + "name": "建元信托", + "tag": "金融", + "reason": "" + }, + { + "code": "600053", + "name": "九鼎投资", + "tag": "金融", + "reason": "" + }, + { + "code": "002177", + "name": "御银股份", + "tag": "金融", + "reason": "" + }, + { + "code": "601519", + "name": "大智慧", + "tag": "金融", + "reason": "", + "hidden_tag": null + }, + { + "code": "000532", + "name": "华金资本", + "tag": "金融", + "reason": "", + "hidden_tag": null + }, + { + "code": "600318", + "name": "新力金融", + "tag": "金融", + "reason": "" + }, + { + "code": "603106", + "name": "恒银科技", + "tag": "金融", + "reason": "" + }, + { + "code": "605111", + "name": "新洁能", + "tag": "半导体", + "reason": "", + "hidden_tag": null + }, + { + "code": "688486", + "name": "龙迅股份", + "tag": "半导体", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "300623", + "name": "捷捷微电", + "tag": "半导体", + "reason": "", + "hidden_tag": null + }, + { + "code": "002559", + "name": "亚威股份", + "tag": "半导体", + "reason": "", + "hidden_tag": null + }, + { + "code": "002654", + "name": "万润科技", + "tag": "半导体", + "reason": "", + "hidden_tag": null + }, + { + "code": "000037", + "name": "深南电A", + "tag": "电力", + "reason": "", + "hidden_tag": null + }, + { + "code": "301012", + "name": "扬电科技", + "tag": "电力", + "reason": "" + }, + { + "code": "000777", + 
"name": "中核科技", + "tag": "电力", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "000966", + "name": "长源电力", + "tag": "电力", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600744", + "name": "华银电力", + "tag": "电力", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "002255", + "name": "海陆重工", + "tag": "电力", + "reason": "" + }, + { + "code": "002471", + "name": "中超控股", + "tag": "电力", + "reason": "" + }, + { + "code": "603530", + "name": "神马电力", + "tag": "电力", + "reason": "" + }, + { + "code": "605011", + "name": "杭州热电", + "tag": "电力", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "605167", + "name": "利柏特", + "tag": "电力", + "reason": "" + }, + { + "code": "001332", + "name": "锡装股份", + "tag": "电力", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "300875", + "name": "捷强装备", + "tag": "电力", + "reason": "" + }, + { + "code": "002728", + "name": "特一药业", + "tag": "医药", + "reason": "", + "hidden_tag": null + }, + { + "code": "002750", + "name": "龙津药业", + "tag": "医药", + "reason": "", + "hidden_tag": null + }, + { + "code": "300181", + "name": "佐力药业", + "tag": "医药", + "reason": "", + "hidden_tag": null + }, + { + "code": "000590", + "name": "启迪药业", + "tag": "医药", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "300534", + "name": "陇神戎发", + "tag": "医药", + "reason": "", + "hidden_tag": null + }, + { + "code": "000766", + "name": "通化金马", + "tag": "医药", + "reason": "", + "hidden_tag": null + }, + { + "code": "600272", + "name": "开开实业", + "tag": "医药", + "reason": "", + "hidden_tag": null + }, + { + "code": "002432", + "name": "九安医疗", + "tag": "医药", + "reason": "", + "hidden_tag": null + }, + { + "code": "603168", + "name": "莎普爱思", + "tag": "医药", + "reason": "" + }, + { + "code": "688222", + "name": "成都先导", + "tag": "医药", + "reason": "" + }, + { + "code": "000820", + "name": "神雾节能", + "tag": "公用", + "reason": "" + }, + { + "code": "300197", + "name": "节能铁汉", + "tag": "公用", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": 
"300172", + "name": "中电环保", + "tag": "公用", + "reason": "" + }, + { + "code": "603955", + "name": "大千生态", + "tag": "公用", + "reason": "" + }, + { + "code": "000010", + "name": "美丽生态", + "tag": "公用", + "reason": "" + }, + { + "code": "002140", + "name": "东华科技", + "tag": "公用", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "605069", + "name": "正和生态", + "tag": "公用", + "reason": "" + }, + { + "code": "603291", + "name": "联合水务", + "tag": "公用", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "300262", + "name": "巴安水务", + "tag": "公用", + "reason": "" + }, + { + "code": "603029", + "name": "天鹅股份", + "tag": "农业", + "reason": "" + }, + { + "code": "002758", + "name": "浙农股份", + "tag": "农业", + "reason": "", + "hidden_tag": null + }, + { + "code": "600354", + "name": "敦煌种业", + "tag": "农业", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "003042", + "name": "中农联合", + "tag": "农业", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "603909", + "name": "建发合诚", + "tag": "房地产", + "reason": "" + }, + { + "code": "605286", + "name": "同力日升", + "tag": "房地产", + "reason": "" + }, + { + "code": "002789", + "name": "建艺集团", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600284", + "name": "浦东建设", + "tag": "房地产", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600639", + "name": "浦东金桥", + "tag": "房地产", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600684", + "name": "珠江股份", + "tag": "房地产", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "603682", + "name": "锦和商管", + "tag": "房地产", + "reason": "" + }, + { + "code": "000029", + "name": "深深房A", + "tag": "房地产", + "reason": "", + "hidden_tag": null + }, + { + "code": "600622", + "name": "光大嘉宝", + "tag": "房地产", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "300989", + "name": "蕾奥规划", + "tag": "房地产", + "reason": "" + }, + { + "code": "000736", + "name": "中交地产", + "tag": "房地产", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "600708", + "name": "光明地产", 
+ "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600648", + "name": "外高桥", + "tag": "房地产", + "reason": "", + "hidden_tag": null + }, + { + "code": "000014", + "name": "沙河股份", + "tag": "房地产", + "reason": "", + "hidden_tag": null + }, + { + "code": "002377", + "name": "国创高新", + "tag": "房地产", + "reason": "" + }, + { + "code": "603778", + "name": "国晟科技", + "tag": "房地产", + "reason": "" + }, + { + "code": "000608", + "name": "阳光股份", + "tag": "房地产", + "reason": "", + "hidden_tag": null + }, + { + "code": "605287", + "name": "德才股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "600657", + "name": "信达地产", + "tag": "房地产", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600234", + "name": "科新发展", + "tag": "房地产", + "reason": "", + "hidden_tag": null + }, + { + "code": "000656", + "name": "金科股份", + "tag": "房地产", + "reason": "", + "hidden_tag": null + }, + { + "code": "300917", + "name": "特发服务", + "tag": "房地产", + "reason": "" + }, + { + "code": "600675", + "name": "中华企业", + "tag": "房地产", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "002116", + "name": "中国海诚", + "tag": "房地产", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "300342", + "name": "天银机电", + "tag": "卫星", + "reason": "", + "hidden_tag": null + }, + { + "code": "603131", + "name": "上海沪工", + "tag": "卫星", + "reason": "", + "hidden_tag": null + }, + { + "code": "000058", + "name": "深赛格", + "tag": "卫星", + "reason": "", + "hidden_tag": null + }, + { + "code": "300405", + "name": "科隆股份", + "tag": "氢能", + "reason": "", + "hidden_tag": null + }, + { + "code": "688551", + "name": "科威尔", + "tag": "氢能", + "reason": "" + }, + { + "code": "603798", + "name": "康普顿", + "tag": "氢能", + "reason": "", + "hidden_tag": null + }, + { + "code": "601226", + "name": "华电重工", + "tag": "氢能", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "603213", + "name": "镇洋发展", + "tag": "氢能", + "reason": "", + "hidden_tag": null + }, + { + "code": "600860", + "name": "京城股份", + "tag": "氢能", + 
"reason": "" + }, + { + "code": "002381", + "name": "双箭股份", + "tag": "养老", + "reason": "" + }, + { + "code": "002172", + "name": "澳洋健康", + "tag": "养老", + "reason": "", + "hidden_tag": null + }, + { + "code": "002162", + "name": "悦心健康", + "tag": "养老", + "reason": "" + }, + { + "code": "002614", + "name": "奥佳华", + "tag": "养老", + "reason": "" + }, + { + "code": "603610", + "name": "麒盛科技", + "tag": "养老", + "reason": "", + "hidden_tag": null + }, + { + "code": "002174", + "name": "游族网络", + "tag": "AI", + "reason": "" + }, + { + "code": "002919", + "name": "名臣健康", + "tag": "AI", + "reason": "" + }, + { + "code": "002168", + "name": "惠程科技", + "tag": "AI", + "reason": "" + }, + { + "code": "002605", + "name": "姚记科技", + "tag": "AI", + "reason": "" + }, + { + "code": "000720", + "name": "新能泰山", + "tag": "电力", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "300374", + "name": "中铁装配", + "tag": "房地产", + "reason": "", + "hidden_tag": "中字头" + }, + { + "code": "603329", + "name": "上海雅仕", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "000505", + "name": "京粮控股", + "tag": "农业", + "reason": "" + }, + { + "code": "600171", + "name": "上海贝岭", + "tag": "半导体", + "reason": "", + "hidden_tag": "央企" + }, + { + "code": "600653", + "name": "申华控股", + "tag": "汽车", + "reason": "" + }, + { + "code": "600629", + "name": "华建集团", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600119", + "name": "长江投资", + "tag": "金融", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "301313", + "name": "凡拓数创", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301396", + "name": "宏景科技", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301209", + "name": "联合化学", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688152", + "name": "麒麟信安", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688489", + "name": "三未信安", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + 
"code": "688435", + "name": "英方软件", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688292", + "name": "浩瀚深度", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301270", + "name": "汉仪股份", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301339", + "name": "通行宝", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301165", + "name": "锐捷网络", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688448", + "name": "磁谷科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688475", + "name": "萤石网络", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301380", + "name": "挖金客", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301311", + "name": "昆船智能", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301283", + "name": "聚胶股份", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301368", + "name": "丰立智能", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301379", + "name": "天山电子", + "tag": "VR", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301277", + "name": "新天地", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688031", + "name": "星环科技-U", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301297", + "name": "富乐德", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301377", + "name": "鼎泰高科", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301171", + "name": "易点天下", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688506", + "name": "百利天恒-U", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688244", + "name": "永信至诚", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301231", + "name": "荣信文化", + "tag": "教育", + "hidden_tag": "次新股", + "reason": "" 
+ }, + { + "code": "301285", + "name": "鸿日达", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688403", + "name": "汇成股份", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301290", + "name": "东星医疗", + "tag": "医疗器械", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301330", + "name": "熵基科技", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301361", + "name": "众智科技", + "tag": "电力", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301255", + "name": "通力科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301366", + "name": "一博科技", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301317", + "name": "鑫磊股份", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688203", + "name": "海正生材", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688141", + "name": "杰华特", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688132", + "name": "邦彦技术", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301115", + "name": "建科股份", + "tag": "专业服务", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301328", + "name": "维峰电子", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688525", + "name": "佰维存储", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688351", + "name": "微电生理-U", + "tag": "医疗器械", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688143", + "name": "长盈通", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301398", + "name": "星源卓镁", + "tag": "汽车", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301365", + "name": "矩阵股份", + "tag": "房地产", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301391", + "name": "卡莱特", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688401", + "name": "路维光电", + "tag": "半导体", + "hidden_tag": 
"次新股", + "reason": "" + }, + { + "code": "301161", + "name": "唯万密封", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688362", + "name": "甬矽电子", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301389", + "name": "隆扬电子", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301276", + "name": "嘉曼服饰", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688426", + "name": "康为世纪", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688410", + "name": "山外山", + "tag": "医疗器械", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301326", + "name": "捷邦科技", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688428", + "name": "诺诚健华-U", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301349", + "name": "信德新材", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301309", + "name": "万得凯", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688420", + "name": "美腾科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688480", + "name": "赛恩斯", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688498", + "name": "源杰科技", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688419", + "name": "耐科装备", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301282", + "name": "金禄电子", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688455", + "name": "科捷智能", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688416", + "name": "恒烁股份", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688409", + "name": "富创精密", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301359", + "name": "东南电子", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688291", + "name": "金橙子", + "tag": 
"AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688387", + "name": "信科移动-U", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688061", + "name": "灿瑞科技", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301388", + "name": "欣灵电气", + "tag": "电力", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688147", + "name": "微导纳米", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301327", + "name": "华宝新能", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688459", + "name": "哈铁科技", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301176", + "name": "逸豪新材", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301335", + "name": "天元宠物", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688432", + "name": "有研硅", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301105", + "name": "鸿铭股份", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301321", + "name": "翰博高新", + "tag": "VR", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688485", + "name": "九州一轨", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688035", + "name": "德邦科技", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688391", + "name": "钜泉科技", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301300", + "name": "远翔新材", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688381", + "name": "帝奥微", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301267", + "name": "华厦眼科", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301280", + "name": "珠城科技", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301299", + "name": "卓创资讯", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688439", + "name": 
"振华风光", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688073", + "name": "毕得医药", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688275", + "name": "万润新能", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688376", + "name": "美埃科技", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301223", + "name": "中荣股份", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301273", + "name": "瑞晨环保", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301301", + "name": "川宁生物", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688370", + "name": "丛麟科技", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688392", + "name": "骄成超声", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301338", + "name": "凯格精机", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301265", + "name": "华新环保", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301296", + "name": "新巨丰", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688271", + "name": "联影医疗", + "tag": "医疗器械", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688084", + "name": "晶品特装", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688247", + "name": "宣泰医药", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688172", + "name": "燕东微", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688372", + "name": "伟测科技", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688252", + "name": "天德钰", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301363", + "name": "美好医疗", + "tag": "医疗器械", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301369", + "name": "联动科技", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": 
"688137", + "name": "近岸蛋白", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688293", + "name": "奥浦迈", + "tag": "专业服务", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301319", + "name": "唯特偶", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301230", + "name": "泓博医药", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301227", + "name": "森鹰窗业", + "tag": "房地产", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301356", + "name": "天振股份", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688184", + "name": "帕瓦股份", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301331", + "name": "恩威医药", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688114", + "name": "华大智造", + "tag": "医疗器械", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301316", + "name": "慧博云通", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301508", + "name": "中机认检", + "tag": "专业服务", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688716", + "name": "中研股份", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "601083", + "name": "锦江航运", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "601061", + "name": "中信金属", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301395", + "name": "仁信新材", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301516", + "name": "中远通", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301172", + "name": "君逸数码", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001367", + "name": "海森药业", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688629", + "name": "华丰科技", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603137", + "name": "恒尚节能", + "tag": "房地产", + "hidden_tag": "次新股", + "reason": "" + }, + { 
+ "code": "301293", + "name": "三博脑科", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301429", + "name": "森泰股份", + "tag": "房地产", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603172", + "name": "万丰股份", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "601033", + "name": "永兴股份", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688469", + "name": "芯联集成-U", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301325", + "name": "曼恩斯特", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301499", + "name": "维科精密", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301505", + "name": "苏州规划", + "tag": "房地产", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688535", + "name": "华海诚科", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "601065", + "name": "江盐集团", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301558", + "name": "三态股份", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301533", + "name": "威马农机", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301448", + "name": "开创电气", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001286", + "name": "陕西能源", + "tag": "电力", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301332", + "name": "德尔玛", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688507", + "name": "索辰科技", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301390", + "name": "经纬股份", + "tag": "电力", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301469", + "name": "恒达新材", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301323", + "name": "新莱福", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301519", + "name": "舜禹股份", + "tag": "智能机器", + "hidden_tag": "次新股", + 
"reason": "" + }, + { + "code": "301517", + "name": "陕西华达", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301355", + "name": "南王科技", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688563", + "name": "航材股份", + "tag": "军工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301503", + "name": "智迪科技", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301507", + "name": "民生健康", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688539", + "name": "高华科技", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301303", + "name": "真兰仪表", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301314", + "name": "科瑞思", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301370", + "name": "国科恒泰", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301373", + "name": "凌玮科技", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301487", + "name": "盟固利", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301568", + "name": "思泰克", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688576", + "name": "西山科技", + "tag": "医疗器械", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301202", + "name": "朗威股份", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603275", + "name": "众辰科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001324", + "name": "长青科技", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301371", + "name": "敷尔佳", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688631", + "name": "莱斯信息", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001360", + "name": "南矿集团", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301225", + "name": "恒勃股份", + "tag": "汽车", + 
"hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301468", + "name": "博盈特焊", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603135", + "name": "中重科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603325", + "name": "博隆技术", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301500", + "name": "飞南资源", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301559", + "name": "中集环科", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001282", + "name": "三联锻造", + "tag": "汽车", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301408", + "name": "华人健康", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301428", + "name": "世纪恒通", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301305", + "name": "朗坤环境", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301489", + "name": "思泉新材", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301232", + "name": "飞沃科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301210", + "name": "金杨股份", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001326", + "name": "联域股份", + "tag": "VR", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "601133", + "name": "柏诚股份", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688307", + "name": "中润光学", + "tag": "VR", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001287", + "name": "中电港", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688522", + "name": "纳睿雷达", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688653", + "name": "康希通信", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301386", + "name": "未来电器", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301262", + "name": "海看股份", + 
"tag": "传媒", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301529", + "name": "福赛科技", + "tag": "汽车", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301439", + "name": "泓淋电力", + "tag": "电力", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301555", + "name": "惠柏新材", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301292", + "name": "海科新源", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688347", + "name": "华虹公司", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301291", + "name": "明阳电气", + "tag": "电力", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301528", + "name": "多浦乐", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301518", + "name": "长华化学", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688671", + "name": "碧兴物联", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301357", + "name": "北方长龙", + "tag": "汽车", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603273", + "name": "天元智能", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001378", + "name": "德冠新材", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603119", + "name": "浙江荣泰", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301418", + "name": "协昌科技", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603270", + "name": "金帝股份", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "300870", + "name": "欧陆通", + "tag": "赛道", + "reason": "" + }, + { + "code": "000899", + "name": "赣能股份", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "600212", + "name": "绿能慧充", + "tag": "赛道", + "reason": "", + "hidden_tag": null + }, + { + "code": "301141", + "name": "中科磁业", + "tag": "资源", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688570", + "name": "天玛智控", + "tag": "AI", + 
"hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688334", + "name": "西高院", + "tag": "专业服务", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301246", + "name": "宏源药业", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "300804", + "name": "广康生化", + "tag": "农业", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301566", + "name": "达利凯普", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301272", + "name": "英华特", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301515", + "name": "港通医疗", + "tag": "医疗器械", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001328", + "name": "登康口腔", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688623", + "name": "双元科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301295", + "name": "美硕科技", + "tag": "电力", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301399", + "name": "英特科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301512", + "name": "智信精密", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688343", + "name": "云天励飞-U", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603075", + "name": "热威股份", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301413", + "name": "安培龙", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688479", + "name": "友车科技", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301203", + "name": "国泰环保", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301372", + "name": "科净源", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603193", + "name": "润本股份", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "300904", + "name": "威力传动", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688638", + "name": "誉辰智能", 
+ "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688549", + "name": "中巨芯-U", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603125", + "name": "常青科技", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301281", + "name": "科源制药", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001358", + "name": "兴欣新材", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301446", + "name": "福事特", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688562", + "name": "航天软件", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301358", + "name": "湖南裕能", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688450", + "name": "光格科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001373", + "name": "翔腾新材", + "tag": "VR", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301382", + "name": "蜂助手", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301421", + "name": "波长光电", + "tag": "VR", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688523", + "name": "航天环宇", + "tag": "军工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301310", + "name": "鑫宏业", + "tag": "电力", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301170", + "name": "锡南科技", + "tag": "汽车", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301381", + "name": "赛维时代", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688593", + "name": "新相微", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688651", + "name": "盛邦安全", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301353", + "name": "普莱得", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688543", + "name": "国科军工", + "tag": "军工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301362", + "name": 
"民爆光电", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688573", + "name": "信宇人", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301378", + "name": "通达海", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301397", + "name": "溯联股份", + "tag": "汽车", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301322", + "name": "绿通科技", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688552", + "name": "航天南湖", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688720", + "name": "艾森股份", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301548", + "name": "崇德科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301251", + "name": "威尔高", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688352", + "name": "颀中科技", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301456", + "name": "盘古智能", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688502", + "name": "茂莱光学", + "tag": "VR", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301252", + "name": "同星科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301488", + "name": "豪恩汽电", + "tag": "汽车", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603062", + "name": "麦加芯彩", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301287", + "name": "康力源", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301329", + "name": "信音电子", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301510", + "name": "固高科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301459", + "name": "丰茂股份", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688657", + "name": "浩辰软件", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301578", 
+ "name": "辰奕智能", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301393", + "name": "昊帆生物", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301419", + "name": "阿莱德", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688646", + "name": "逸飞激光", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301260", + "name": "格力博", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688582", + "name": "芯动联科", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688693", + "name": "锴威特", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603004", + "name": "鼎龙科技", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688602", + "name": "康鹏科技", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688146", + "name": "中船特气", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688443", + "name": "智翔金泰-U", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688548", + "name": "广钢气体", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301261", + "name": "恒工精密", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301387", + "name": "光大同创", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301348", + "name": "蓝箭电子", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301550", + "name": "斯菱股份", + "tag": "汽车", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688652", + "name": "京仪装备", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301486", + "name": "致尚科技", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688610", + "name": "埃科光电", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688620", + "name": "安凯微", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + 
"code": "688429", + "name": "时创能源", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301567", + "name": "贝隆精密", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301345", + "name": "涛涛车业", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301320", + "name": "豪江智能", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301307", + "name": "美利信", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688603", + "name": "天承科技", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688531", + "name": "日联科技", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001306", + "name": "夏厦精密", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688515", + "name": "裕太微-U", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688458", + "name": "美芯晟", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001380", + "name": "华纬科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688591", + "name": "泰凌微", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688361", + "name": "中科飞测-U", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301509", + "name": "金凯生科", + "tag": "减肥药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301157", + "name": "华塑科技", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301498", + "name": "乖宝宠物", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688627", + "name": "精智达", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688512", + "name": "慧智微-U", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688592", + "name": "司南导航", + "tag": "AI", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688433", + "name": "华曙高科", + "tag": "智能机器", + "hidden_tag": "次新股", + 
"reason": "" + }, + { + "code": "688702", + "name": "盛科通信-U", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688484", + "name": "南芯科技", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301577", + "name": "C美信", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688249", + "name": "晶合集成", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688478", + "name": "晶升股份", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301525", + "name": "儒竞科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301520", + "name": "万邦医药", + "tag": "医药", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "301360", + "name": "荣旗科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603296", + "name": "华勤技术", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688648", + "name": "中邮科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688581", + "name": "安杰思", + "tag": "医疗器械", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688719", + "name": "爱科赛博", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "688612", + "name": "威迈斯", + "tag": "汽车", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603375", + "name": "C盛景微", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "873122", + "name": "中纺标", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601068", + "name": "中铝国际", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "000151", + "name": "中成股份", + "tag": "外销", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601857", + "name": "中国石油", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "000928", + "name": "中钢国际", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601800", + "name": "中国交建", + "tag": "中字头", + 
"hidden_tag": "中字头", + "reason": "" + }, + { + "code": "002401", + "name": "中远海科", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "000797", + "name": "中国武夷", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600938", + "name": "中国海油", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600056", + "name": "中国医药", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601111", + "name": "中国国航", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "002051", + "name": "中工国际", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601088", + "name": "中国神华", + "tag": "资源", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600737", + "name": "中粮糖业", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601600", + "name": "中国铝业", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601919", + "name": "中远海控", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "000927", + "name": "中国铁物", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601668", + "name": "中国建筑", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "301215", + "name": "中汽股份", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600720", + "name": "中交设计", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600428", + "name": "中远海特", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600007", + "name": "中国国贸", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "001213", + "name": "中铁特货", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "000758", + "name": "中色股份", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600328", + "name": "中盐化工", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601669", + "name": 
"中国电建", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600941", + "name": "中国移动", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600787", + "name": "中储股份", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600489", + "name": "中金黄金", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601949", + "name": "中国出版", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600482", + "name": "中国动力", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600339", + "name": "中油工程", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601117", + "name": "中国化学", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600730", + "name": "中国高科", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601989", + "name": "中国重工", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600050", + "name": "中国联通", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601898", + "name": "中煤能源", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601016", + "name": "节能风电", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601618", + "name": "中国中冶", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "603126", + "name": "中材节能", + "tag": "公用", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "300114", + "name": "中航电测", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601985", + "name": "中国核电", + "tag": "电力", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601186", + "name": "中国铁建", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600176", + "name": "中国巨石", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "300140", + "name": "节能环境", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": 
"003816", + "name": "中国广核", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600158", + "name": "中体产业", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600528", + "name": "中铁工业", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600026", + "name": "中远海能", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601390", + "name": "中国中铁", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600150", + "name": "中国船舶", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600916", + "name": "中国黄金", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "000066", + "name": "中国长城", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "000519", + "name": "中兵红箭", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "000831", + "name": "中国稀土", + "tag": "资源", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601728", + "name": "中国电信", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601106", + "name": "中国一重", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601766", + "name": "中国中车", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "300962", + "name": "中金辐照", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "000099", + "name": "中信海直", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "000951", + "name": "中国重汽", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "000009", + "name": "中国宝安", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "002080", + "name": "中材科技", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601858", + "name": "中国科传", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601179", + "name": "中国西电", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" 
+ }, + { + "code": "601868", + "name": "中国能建", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600536", + "name": "中国软件", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601611", + "name": "中国核建", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600028", + "name": "中国石化", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "002057", + "name": "中钢天源", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "688119", + "name": "中钢洛耐", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "000881", + "name": "中广核技", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601888", + "name": "中国中免", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600764", + "name": "中国海防", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601598", + "name": "中国外运", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600118", + "name": "中国卫星", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600685", + "name": "中船防务", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "688009", + "name": "中国通号", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "301058", + "name": "中粮科工", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "603860", + "name": "中公高科", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600862", + "name": "中航高科", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600977", + "name": "中国电影", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601698", + "name": "中国卫通", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "600970", + "name": "中材国际", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "688128", + "name": "中国电研", + "tag": "中字头", + "hidden_tag": 
"中字头", + "reason": "" + }, + { + "code": "300847", + "name": "中船汉光", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "601965", + "name": "中国汽研", + "tag": "中字头", + "hidden_tag": "中字头", + "reason": "" + }, + { + "code": "872808", + "name": "曙光数创", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300548", + "name": "博创科技", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "430139", + "name": "华岭股份", + "tag": "半导体", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600409", + "name": "三友化工", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "835640", + "name": "富士达", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002049", + "name": "紫光国微", + "tag": "半导体", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000666", + "name": "经纬纺机", + "tag": "金融", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600551", + "name": "时代出版", + "tag": "传媒", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600636", + "name": "国新文化", + "tag": "传媒", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002320", + "name": "海峡股份", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600508", + "name": "上海能源", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600962", + "name": "国投中鲁", + "tag": "大消费", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000819", + "name": "岳阳兴长", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600458", + "name": "时代新材", + "tag": "赛道", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600647", + "name": "*ST同达", + "tag": "大消费", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600396", + "name": "*ST金山", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000999", + "name": "华润三九", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000717", + "name": "中南股份", + "tag": "房地产", + "hidden_tag": "央企", + 
"reason": "" + }, + { + "code": "600640", + "name": "国脉文化", + "tag": "传媒", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002305", + "name": "南国置业", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000423", + "name": "东阿阿胶", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600048", + "name": "保利发展", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600058", + "name": "五矿发展", + "tag": "大消费", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600961", + "name": "株冶集团", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000839", + "name": "ST国安", + "tag": "央企", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300188", + "name": "国投智能", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000031", + "name": "大悦城", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000566", + "name": "海南海药", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600750", + "name": "江中药业", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600495", + "name": "晋西车轴", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600345", + "name": "长江通信", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600217", + "name": "中再资环", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "001979", + "name": "招商蛇口", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601808", + "name": "中海油服", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002314", + "name": "南山控股", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000798", + "name": "中水渔业", + "tag": "农业", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002205", + "name": "国统股份", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000069", + "name": "华侨城A", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" 
+ }, + { + "code": "600501", + "name": "航天晨光", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000698", + "name": "沈阳化工", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601005", + "name": "重庆钢铁", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600808", + "name": "马钢股份", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600343", + "name": "航天动力", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600011", + "name": "华能国际", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000807", + "name": "云铝股份", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600688", + "name": "上海石化", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600148", + "name": "长春一东", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600019", + "name": "宝钢股份", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600027", + "name": "华电国际", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000059", + "name": "华锦股份", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600292", + "name": "远达环保", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000830", + "name": "鲁西化工", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600099", + "name": "林海股份", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600710", + "name": "苏美达", + "tag": "大消费", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300334", + "name": "津膜科技", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000554", + "name": "泰山石油", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "001965", + "name": "招商公路", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000825", + "name": "太钢不锈", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + 
"code": "000883", + "name": "湖北能源", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000708", + "name": "中信特钢", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688248", + "name": "南网科技", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601872", + "name": "招商轮船", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601718", + "name": "际华集团", + "tag": "大消费", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000878", + "name": "云南铜业", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000065", + "name": "北方国际", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "001872", + "name": "招商港口", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600886", + "name": "国投电力", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000898", + "name": "鞍钢股份", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600698", + "name": "湖南天雁", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600973", + "name": "宝胜股份", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600262", + "name": "北方股份", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "301137", + "name": "哈焊华通", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000958", + "name": "电投产融", + "tag": "赛道", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601918", + "name": "新集能源", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600863", + "name": "内蒙华电", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600581", + "name": "八一钢铁", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300425", + "name": "中建环能", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000786", + "name": "北新建材", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": 
"000962", + "name": "东方钽业", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601880", + "name": "辽港股份", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300388", + "name": "节能国祯", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600795", + "name": "国电电力", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600184", + "name": "光电股份", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600029", + "name": "南方航空", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600115", + "name": "中国东航", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002462", + "name": "嘉事堂", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600433", + "name": "冠豪高新", + "tag": "大消费", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000028", + "name": "国药一致", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600320", + "name": "振华重工", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000877", + "name": "天山股份", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002066", + "name": "瑞泰科技", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600500", + "name": "中化国际", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601212", + "name": "白银有色", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000731", + "name": "四川美丰", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600550", + "name": "保变电气", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600299", + "name": "安迪苏", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000800", + "name": "一汽解放", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300212", + "name": "易华录", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000778", + 
"name": "新兴铸管", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600313", + "name": "农发种业", + "tag": "农业", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600517", + "name": "国网英大", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002246", + "name": "北化股份", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000553", + "name": "安道麦A", + "tag": "农业", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600236", + "name": "桂冠电力", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600198", + "name": "大唐电信", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002039", + "name": "黔源电力", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002302", + "name": "西部建设", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600283", + "name": "钱江水利", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000761", + "name": "本钢板材", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000008", + "name": "神州高铁", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600062", + "name": "华润双鹤", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002368", + "name": "太极股份", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600230", + "name": "沧州大化", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601866", + "name": "中远海发", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601608", + "name": "中信重工", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600900", + "name": "长江电力", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600980", + "name": "北矿科技", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000826", + "name": "启迪环境", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300597", + "name": 
"吉大通信", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000875", + "name": "吉电股份", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "001914", + "name": "招商积余", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600025", + "name": "华能水电", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600582", + "name": "天地科技", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600523", + "name": "贵航股份", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600876", + "name": "凯盛新能", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000629", + "name": "钒钛股份", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002037", + "name": "保利联合", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600967", + "name": "内蒙一机", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600505", + "name": "西昌电力", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000032", + "name": "深桑达A", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601816", + "name": "京沪高铁", + "tag": "统一大市场", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "603698", + "name": "航天工程", + "tag": "专业服务", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600392", + "name": "盛和资源", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000815", + "name": "美利云", + "tag": "大消费", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600379", + "name": "宝光股份", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601006", + "name": "大秦铁路", + "tag": "统一大市场", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600583", + "name": "海油工程", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300446", + "name": "航天智造", + "tag": "半导体", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000503", + "name": 
"国新健康", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600560", + "name": "金自天正", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600579", + "name": "克劳斯", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600135", + "name": "乐凯胶片", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600497", + "name": "驰宏锌锗", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300070", + "name": "碧水源", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601968", + "name": "宝钢包装", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000068", + "name": "华控赛格", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600963", + "name": "岳阳林纸", + "tag": "大消费", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601399", + "name": "国机重装", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601038", + "name": "一拖股份", + "tag": "农业", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600598", + "name": "北大荒", + "tag": "农业", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600125", + "name": "铁龙物流", + "tag": "统一大市场", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002128", + "name": "电投能源", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000788", + "name": "北大医药", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600855", + "name": "航天长峰", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002232", + "name": "启明信息", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300087", + "name": "荃银高科", + "tag": "农业", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600101", + "name": "明星电力", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600129", + "name": "太极集团", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600845", + "name": "宝信软件", + 
"tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000768", + "name": "中航西飞", + "tag": "军工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002265", + "name": "建设工业", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300797", + "name": "钢研纳克", + "tag": "专业服务", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300523", + "name": "辰安科技", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300788", + "name": "中信出版", + "tag": "传媒", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "603060", + "name": "国检集团", + "tag": "专业服务", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601991", + "name": "大唐发电", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600875", + "name": "东方电气", + "tag": "赛道", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600871", + "name": "石化油服", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600726", + "name": "华电能源", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600469", + "name": "风神股份", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600452", + "name": "涪陵电力", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600444", + "name": "国机通用", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600435", + "name": "北方导航", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600271", + "name": "航天信息", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600161", + "name": "天坛生物", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600037", + "name": "歌华有线", + "tag": "传媒", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "301048", + "name": "金鹰重工", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300105", + "name": "龙源技术", + "tag": "赛道", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002179", + "name": "中航光电", + "tag": 
"消费电子", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000410", + "name": "沈阳机床", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002025", + "name": "航天电器", + "tag": "消费电子", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "001289", + "name": "龙源电力", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002268", + "name": "电科网安", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600038", + "name": "中直股份", + "tag": "军工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600335", + "name": "国机汽车", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000969", + "name": "安泰科技", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600476", + "name": "湘邮科技", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600116", + "name": "三峡水利", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600372", + "name": "中航机载", + "tag": "军工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600206", + "name": "有研新材", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000851", + "name": "高鸿股份", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002063", + "name": "远光软件", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "301175", + "name": "中科环保", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600995", + "name": "南网储能", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000663", + "name": "永安林业", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000547", + "name": "航天发展", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002258", + "name": "利尔化学", + "tag": "农业", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600990", + "name": "四创电子", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600776", + "name": "东方通信", + "tag": "AI", + 
"hidden_tag": "央企", + "reason": "" + }, + { + "code": "000998", + "name": "隆平高科", + "tag": "农业", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600100", + "name": "同方股份", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600131", + "name": "国网信通", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000920", + "name": "沃顿科技", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600968", + "name": "海油发展", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600106", + "name": "重庆路桥", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002190", + "name": "成飞集成", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600406", + "name": "国电南瑞", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000591", + "name": "太阳能", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600601", + "name": "方正科技", + "tag": "消费电子", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000727", + "name": "冠捷科技", + "tag": "VR", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002017", + "name": "东信和平", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600905", + "name": "三峡能源", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002226", + "name": "江南化工", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000901", + "name": "航天科技", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600021", + "name": "上海电力", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600151", + "name": "航天机电", + "tag": "赛道", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688297", + "name": "中无人机", + "tag": "军工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600316", + "name": "洪都航空", + "tag": "军工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600084", + "name": "中信尼雅", + "tag": "大消费", + "hidden_tag": 
"央企", + "reason": "" + }, + { + "code": "003035", + "name": "南网能源", + "tag": "专业服务", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002046", + "name": "国机精工", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600511", + "name": "国药股份", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688396", + "name": "华润微", + "tag": "半导体", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002544", + "name": "智能机器", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000922", + "name": "佳电股份", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600760", + "name": "中航沈飞", + "tag": "军工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "603128", + "name": "华贸物流", + "tag": "统一大市场", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000970", + "name": "中科三环", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600391", + "name": "航发科技", + "tag": "军工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "301090", + "name": "华润材料", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688285", + "name": "高铁电气", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600879", + "name": "航天电子", + "tag": "军工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600138", + "name": "中青旅", + "tag": "大消费", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "603013", + "name": "亚普股份", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000625", + "name": "长安汽车", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002590", + "name": "万安科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "603897", + "name": "长城科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "000862", + "name": "银星能源", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300527", + "name": "中船应急", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600893", + 
"name": "航发动力", + "tag": "军工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600850", + "name": "电科数字", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600562", + "name": "国睿科技", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002415", + "name": "海康威视", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000021", + "name": "深科技", + "tag": "消费电子", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688722", + "name": "同益中", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688509", + "name": "正元地信", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688151", + "name": "华强科技", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600877", + "name": "电科芯片", + "tag": "半导体", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300024", + "name": "智能机器", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000050", + "name": "深天马A", + "tag": "VR", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688425", + "name": "铁建重工", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000733", + "name": "振华科技", + "tag": "消费电子", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688568", + "name": "中科星图", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002106", + "name": "莱宝高科", + "tag": "VR", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600389", + "name": "江山股份", + "tag": "农业", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002222", + "name": "福晶科技", + "tag": "VR", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000026", + "name": "飞亚达", + "tag": "大消费", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688779", + "name": "长远锂科", + "tag": "赛道", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601156", + "name": "东航物流", + "tag": "统一大市场", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688190", + "name": 
"云路股份", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "003009", + "name": "中天火箭", + "tag": "军工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600195", + "name": "中牧股份", + "tag": "农业", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300396", + "name": "迪瑞医疗", + "tag": "医疗器械", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600498", + "name": "烽火通信", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601798", + "name": "蓝科高新", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000938", + "name": "紫光股份", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000550", + "name": "江铃汽车", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688187", + "name": "时代电气", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002389", + "name": "航天彩虹", + "tag": "消费电子", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688015", + "name": "交控科技", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600765", + "name": "中航重机", + "tag": "军工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "601333", + "name": "广深铁路", + "tag": "统一大市场", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600072", + "name": "中船科技", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600268", + "name": "国电南自", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000738", + "name": "航发控制", + "tag": "军工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300455", + "name": "航天智装", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000400", + "name": "许继电气", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "603000", + "name": "人民网", + "tag": "传媒", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002643", + "name": "万润股份", + "tag": "半导体", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002732", + "name": "燕塘乳业", + 
"tag": "大消费", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002777", + "name": "久远银海", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "603888", + "name": "新华网", + "tag": "传媒", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "603927", + "name": "中科软", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600420", + "name": "国药现代", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300516", + "name": "久之洋", + "tag": "消费电子", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688586", + "name": "江航装备", + "tag": "军工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300557", + "name": "理工光科", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002338", + "name": "奥普光电", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600312", + "name": "平高电气", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688295", + "name": "中复神鹰", + "tag": "化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300747", + "name": "锐科激光", + "tag": "消费电子", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300073", + "name": "当升科技", + "tag": "赛道", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688456", + "name": "有研粉材", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600552", + "name": "凯盛科技", + "tag": "VR", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300294", + "name": "博雅生物", + "tag": "医药", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600081", + "name": "东风科技", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002281", + "name": "光迅科技", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002916", + "name": "深南电路", + "tag": "消费电子", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "603019", + "name": "中科曙光", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600378", + "name": "昊华科技", + "tag": 
"化工", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600189", + "name": "泉阳泉", + "tag": "大消费", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000852", + "name": "石化机械", + "tag": "智能机器", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "000537", + "name": "中绿电", + "tag": "电力", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688707", + "name": "振华新材", + "tag": "赛道", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600006", + "name": "东风汽车", + "tag": "汽车", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300678", + "name": "中科信息", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688041", + "name": "海光信息", + "tag": "半导体", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "002935", + "name": "天奥电子", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "300034", + "name": "钢研高纳", + "tag": "资源", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688375", + "name": "国博电子", + "tag": "消费电子", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "688569", + "name": "铁科轨道", + "tag": "公用", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "003031", + "name": "中瓷电子", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600486", + "name": "扬农化工", + "tag": "农业", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "600775", + "name": "南京熊猫", + "tag": "AI", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "603970", + "name": "中农立华", + "tag": "农业", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "001223", + "name": "欧克科技", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001299", + "name": "美能能源", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603130", + "name": "云中马", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603163", + "name": "圣晖集成", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001222", + "name": "源飞宠物", + "tag": "大消费", + 
"hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001301", + "name": "尚太科技", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001298", + "name": "好上好", + "tag": "消费电子", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001256", + "name": "炜冈科技", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001322", + "name": "箭牌家居", + "tag": "房地产", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001259", + "name": "利仁科技", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603237", + "name": "五芳斋", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001255", + "name": "博菲电气", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001231", + "name": "农心科技", + "tag": "农业", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001333", + "name": "光华股份", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001331", + "name": "胜通能源", + "tag": "统一大市场", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001338", + "name": "永顺泰", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001330", + "name": "博纳影业", + "tag": "传媒", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603151", + "name": "邦基科技", + "tag": "农业", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603057", + "name": "紫燕食品", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001283", + "name": "豪鹏科技", + "tag": "赛道", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603255", + "name": "鼎际得", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603182", + "name": "嘉华股份", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001260", + "name": "坤泰股份", + "tag": "汽车", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001225", + "name": "和泰机电", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "600925", + "name": "苏能股份", + 
"tag": "资源", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "601121", + "name": "宝地矿业", + "tag": "房地产", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001337", + "name": "四川黄金", + "tag": "资源", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603073", + "name": "彩蝶实业", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001376", + "name": "百通能源", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603162", + "name": "海通发展", + "tag": "公用", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001278", + "name": "一彬科技", + "tag": "汽车", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603190", + "name": "亚通精工", + "tag": "汽车", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603065", + "name": "宿迁联盛", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603307", + "name": "扬州金泉", + "tag": "大消费", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603282", + "name": "亚光股份", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "001311", + "name": "多利科技", + "tag": "汽车", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603173", + "name": "福斯达", + "tag": "智能机器", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603153", + "name": "上海建科", + "tag": "房地产", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "002883", + "name": "中设股份", + "tag": "房地产", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "603061", + "name": "金海通", + "tag": "半导体", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603281", + "name": "江瀚新材", + "tag": "化工", + "hidden_tag": "次新股", + "reason": "" + }, + { + "code": "603648", + "name": "畅联股份", + "tag": "统一大市场", + "hidden_tag": "央企", + "reason": "" + }, + { + "code": "900948", + "name": "伊泰B股", + "tag": "资源", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "601699", + "name": "潞安环能", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601225", + "name": 
"陕西煤业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601101", + "name": "昊华能源", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601001", + "name": "晋控煤业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600546", + "name": "山煤国际", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000983", + "name": "山西焦煤", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000937", + "name": "冀中能源", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601666", + "name": "平煤股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600985", + "name": "淮北矿业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600188", + "name": "兖矿能源", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600123", + "name": "兰花科创", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600971", + "name": "恒源煤电", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600348", + "name": "华阳股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600997", + "name": "开滦股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600395", + "name": "盘江股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000552", + "name": "甘肃能化", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600740", + "name": "山西焦化", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600725", + "name": "云维股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000571", + "name": "新大洲A", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000723", + "name": "美锦能源", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600397", + "name": "安源煤业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600758", + "name": "辽宁能源", + "tag": 
"资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601015", + "name": "陕西黑猫", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601011", + "name": "宝泰隆", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600121", + "name": "郑州煤电", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600792", + "name": "云煤能源", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600403", + "name": "大有能源", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600408", + "name": "安泰集团", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "839167", + "name": "同享科技", + "tag": "资源", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "871634", + "name": "新威凌", + "tag": "资源", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "000688", + "name": "国城矿业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000933", + "name": "神火股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601168", + "name": "西部矿业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000630", + "name": "铜陵有色", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600362", + "name": "江西铜业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600219", + "name": "南山铝业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002532", + "name": "天山铝业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002203", + "name": "海亮股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688122", + "name": "西部超导", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000060", + "name": "中金岭南", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601137", + "name": "博威合金", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688257", + "name": "新锐股份", + "tag": "资源", + 
"hidden_tag": null, + "reason": "" + }, + { + "code": "600888", + "name": "新疆众和", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000751", + "name": "锌业股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601702", + "name": "华峰铝业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601677", + "name": "明泰铝业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002379", + "name": "宏创控股", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600595", + "name": "中孚实业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603876", + "name": "鼎胜新材", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000612", + "name": "焦作万方", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002160", + "name": "常铝股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002171", + "name": "楚江新材", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000426", + "name": "兴业银锡", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600255", + "name": "鑫科材料", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688786", + "name": "悦安新材", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600531", + "name": "豫光金铅", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002988", + "name": "豪美新材", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300855", + "name": "图南股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603132", + "name": "金徽股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601609", + "name": "金田股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000737", + "name": "北方铜业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600490", + "name": "鹏欣资源", + "tag": "资源", + "hidden_tag": null, 
+ "reason": "" + }, + { + "code": "600361", + "name": "创新新材", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601388", + "name": "怡球资源", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688102", + "name": "斯瑞新材", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002182", + "name": "宝武镁业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002540", + "name": "亚太科技", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300337", + "name": "银邦股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002824", + "name": "和胜股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603115", + "name": "海星股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002501", + "name": "利源股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "003038", + "name": "鑫铂股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600331", + "name": "宏达股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600338", + "name": "西藏珠峰", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300697", + "name": "电工合金", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300930", + "name": "屹通新材", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000633", + "name": "合金投资", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000603", + "name": "盛达资源", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002996", + "name": "顺博合金", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688231", + "name": "隆达股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603937", + "name": "丽岛新材", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603527", + "name": "众源新材", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, 
+ { + "code": "603978", + "name": "深圳新星", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002295", + "name": "精艺股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300963", + "name": "中洲特材", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300828", + "name": "锐新科技", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603045", + "name": "福达合金", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600768", + "name": "宁波富邦", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002114", + "name": "罗平锌电", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300986", + "name": "志特新材", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002806", + "name": "华锋股份", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300489", + "name": "光智科技", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300057", + "name": "万顺新材", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601020", + "name": "华钰矿业", + "tag": "资源", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002235", + "name": "安妮股份", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600576", + "name": "祥源文旅", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002115", + "name": "三维通信", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600052", + "name": "东望时代", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "834021", + "name": "流金科技", + "tag": "传媒", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "300295", + "name": "三六五网", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600373", + "name": "中文传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600136", + "name": "*ST明诚", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": 
"601928", + "name": "凤凰传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601098", + "name": "中南传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603999", + "name": "读者传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300987", + "name": "川网传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601019", + "name": "山东出版", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601999", + "name": "出版传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601811", + "name": "新华文轩", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000719", + "name": "中原传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300770", + "name": "新媒股份", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601921", + "name": "浙版传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601801", + "name": "皖新传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600757", + "name": "长江传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002027", + "name": "分众传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300413", + "name": "芒果超媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600959", + "name": "江苏有线", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000156", + "name": "华数传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002238", + "name": "天威视讯", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605168", + "name": "三人行", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000802", + "name": "北京文化", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600637", + "name": "东方明珠", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002343", + "name": 
"慈文传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601929", + "name": "吉视传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600861", + "name": "北京人力", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300795", + "name": "米奥会展", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600229", + "name": "城市传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601900", + "name": "南方传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600986", + "name": "浙文互联", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000607", + "name": "华媒控股", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300251", + "name": "光线传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000917", + "name": "电广传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002292", + "name": "奥飞娱乐", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002858", + "name": "力盛体育", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300654", + "name": "世纪天鸿", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300182", + "name": "捷成股份", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002400", + "name": "省广集团", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603096", + "name": "新经典", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300133", + "name": "华策影视", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002739", + "name": "万达电影", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300860", + "name": "锋尚文化", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000681", + "name": "视觉中国", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603103", + "name": "横店影视", + "tag": 
"传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301052", + "name": "果麦文化", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300662", + "name": "科锐国际", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000665", + "name": "湖北广电", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002712", + "name": "思美传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002502", + "name": "ST鼎龙", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300071", + "name": "福石控股", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301102", + "name": "兆讯传媒", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000892", + "name": "欢瑞世纪", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300063", + "name": "天龙集团", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600936", + "name": "广西广电", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002699", + "name": "*ST美盛", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600556", + "name": "天下秀", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300805", + "name": "电声股份", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300792", + "name": "壹网壹创", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301025", + "name": "读客文化", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300058", + "name": "蓝色光标", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300027", + "name": "华谊兄弟", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600831", + "name": "广电网络", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300291", + "name": "百纳千成", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603533", + "name": "掌阅科技", + "tag": "传媒", + 
"hidden_tag": null, + "reason": "" + }, + { + "code": "300242", + "name": "佳云科技", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300280", + "name": "紫天科技", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300612", + "name": "宣亚国际", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600996", + "name": "贵广网络", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300426", + "name": "唐德影视", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600358", + "name": "国旅联合", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300061", + "name": "旗天科技", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002905", + "name": "金逸影视", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002995", + "name": "天地在线", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300528", + "name": "幸福蓝海", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300947", + "name": "德必集团", + "tag": "传媒", + "hidden_tag": null, + "reason": "" + }, + { + "code": "900937", + "name": "华电B股", + "tag": "电力", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "900957", + "name": "凌云B股", + "tag": "电力", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "601908", + "name": "京运通", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002608", + "name": "江苏国信", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600674", + "name": "川投能源", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600023", + "name": "浙能电力", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600642", + "name": "申能股份", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000690", + "name": "宝新能源", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000027", + "name": "深圳能源", + "tag": "电力", + "hidden_tag": 
null, + "reason": "" + }, + { + "code": "600098", + "name": "广州发展", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600578", + "name": "京能电力", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "200539", + "name": "粤电力B", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600163", + "name": "中闽能源", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600157", + "name": "永泰能源", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000543", + "name": "皖能电力", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000791", + "name": "甘肃能源", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600483", + "name": "福能股份", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000767", + "name": "晋控电力", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000539", + "name": "粤电力A", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600982", + "name": "宁波能源", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600509", + "name": "天富能源", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601619", + "name": "嘉泽新能", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601778", + "name": "晶科科技", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603693", + "name": "江苏新能", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600032", + "name": "浙江新能", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605580", + "name": "恒盛能源", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "001896", + "name": "豫能控股", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600868", + "name": "梅雁吉祥", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600167", + "name": "联美控股", + "tag": "电力", + "hidden_tag": null, + "reason": 
"" + }, + { + "code": "600821", + "name": "金开新能", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000993", + "name": "闽东电力", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600979", + "name": "广安爱众", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000040", + "name": "东旭蓝天", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000600", + "name": "建投能源", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000531", + "name": "穗恒运A", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600644", + "name": "乐山电力", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000722", + "name": "湖南发展", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600310", + "name": "广西能源", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002015", + "name": "协鑫能科", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "001258", + "name": "立新能源", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605028", + "name": "世茂能源", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600969", + "name": "郴电国际", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600780", + "name": "通宝能源", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000601", + "name": "韶能股份", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002616", + "name": "长青集团", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600149", + "name": "廊坊发展", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "200037", + "name": "深南电B", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600719", + "name": "大连热电", + "tag": "电力", + "hidden_tag": null, + "reason": "" + }, + { + "code": "831010", + "name": "凯添燃气", + "tag": "公用", + "hidden_tag": "北交所", + "reason": "" + }, + { + 
"code": "600803", + "name": "新奥股份", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000669", + "name": "ST金鸿", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605368", + "name": "蓝天燃气", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002259", + "name": "ST升达", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603318", + "name": "水发燃气", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600956", + "name": "新天绿能", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601139", + "name": "深圳燃气", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002911", + "name": "佛燃能源", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600681", + "name": "百川能源", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600635", + "name": "大众公用", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603053", + "name": "成都燃气", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002267", + "name": "陕天然气", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605090", + "name": "九丰能源", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002700", + "name": "ST浩源", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000968", + "name": "蓝焰控股", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600917", + "name": "重庆燃气", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600617", + "name": "国新能源", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600903", + "name": "贵州燃气", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605169", + "name": "洪通燃气", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300332", + "name": "天壕能源", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603689", + 
"name": "皖天然气", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000407", + "name": "胜利股份", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600333", + "name": "长春燃气", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603393", + "name": "新天然气", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300435", + "name": "中泰股份", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000421", + "name": "南京公用", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603706", + "name": "东方环宇", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603080", + "name": "新疆火炬", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300483", + "name": "首华燃气", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000593", + "name": "德龙汇能", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600256", + "name": "广汇能源", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600777", + "name": "新潮能源", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000096", + "name": "广聚能源", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002778", + "name": "中晟高科", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002221", + "name": "东华能源", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603353", + "name": "和顺石油", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600759", + "name": "ST洲际", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600387", + "name": "ST海越", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000637", + "name": "ST实华", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000159", + "name": "国际实业", + "tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300839", + "name": "博汇股份", + 
"tag": "公用", + "hidden_tag": null, + "reason": "" + }, + { + "code": "839273", + "name": "一致魔芋", + "tag": "大消费", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "839371", + "name": "欧福蛋业", + "tag": "大消费", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "833429", + "name": "康比特", + "tag": "大消费", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "836422", + "name": "润普食品", + "tag": "大消费", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "831726", + "name": "朱老六", + "tag": "大消费", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "836826", + "name": "盖世食品", + "tag": "大消费", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "832023", + "name": "田野股份", + "tag": "大消费", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "300997", + "name": "欢乐家", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "001318", + "name": "阳光乳业", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300898", + "name": "熊猫乳品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300175", + "name": "朗源股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300106", + "name": "西部牧业", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002840", + "name": "华统股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "832786", + "name": "骑士乳业", + "tag": "大消费", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "605499", + "name": "东鹏饮料", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600873", + "name": "梅花生物", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603288", + "name": "海天味业", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600872", + "name": "中炬高新", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "001219", + "name": "青岛食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603156", + "name": 
"养元饮品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603027", + "name": "千禾味业", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002726", + "name": "龙大美食", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600887", + "name": "伊利股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002991", + "name": "甘源食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300908", + "name": "仲景食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300791", + "name": "仙乐健康", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "200019", + "name": "深粮B", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002847", + "name": "盐津铺子", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002557", + "name": "洽洽食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300858", + "name": "科拓生物", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000895", + "name": "双汇发展", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002650", + "name": "加加食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002507", + "name": "涪陵榨菜", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603886", + "name": "元祖股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600597", + "name": "光明乳业", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000848", + "name": "承德露露", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300999", + "name": "金龙鱼", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301206", + "name": "三元生物", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300146", + "name": "汤臣倍健", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002597", + "name": 
"金禾实业", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600429", + "name": "三元股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002216", + "name": "三全食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600305", + "name": "恒顺醋业", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605567", + "name": "春雪食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603043", + "name": "广州酒家", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603235", + "name": "天新药业", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600298", + "name": "安琪酵母", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605300", + "name": "佳禾食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600073", + "name": "上海梅林", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605179", + "name": "一鸣食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002481", + "name": "双塔食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "001215", + "name": "千味央厨", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605016", + "name": "百龙创园", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002946", + "name": "新乳业", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000716", + "name": "黑芝麻", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603517", + "name": "绝味食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603102", + "name": "百合股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002910", + "name": "庄园牧场", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605337", + "name": "李子园", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002286", + "name": 
"保龄宝", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600300", + "name": "维维股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603345", + "name": "安井食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603317", + "name": "天味食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600419", + "name": "天润乳业", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000529", + "name": "广弘控股", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605089", + "name": "味知香", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002570", + "name": "贝因美", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000911", + "name": "广农糖业", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605338", + "name": "巴比食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002852", + "name": "道道全", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300915", + "name": "海融科技", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605388", + "name": "均瑶健康", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002702", + "name": "海欣食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603697", + "name": "有友食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "003030", + "name": "祖名股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300094", + "name": "国联水产", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603696", + "name": "安记食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301116", + "name": "益客食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605339", + "name": "南侨食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002515", + "name": 
"金字火腿", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002956", + "name": "西麦食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002330", + "name": "得利斯", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002582", + "name": "好想你", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603020", + "name": "爱普股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300973", + "name": "立高食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600866", + "name": "星湖科技", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603079", + "name": "圣达生物", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002661", + "name": "克明食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603866", + "name": "桃李面包", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603755", + "name": "日辰股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300765", + "name": "新诺威", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300741", + "name": "华宝股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002695", + "name": "煌上煌", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605077", + "name": "华康股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000639", + "name": "西王食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002626", + "name": "金达威", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300892", + "name": "品渥食品", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605198", + "name": "安德利", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300829", + "name": "金丹科技", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688089", + "name": 
"嘉必优", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600191", + "name": "华资实业", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "900923", + "name": "百联B股", + "tag": "大消费", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "000759", + "name": "中百集团", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600838", + "name": "上海九百", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600306", + "name": "*ST商城", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000417", + "name": "合肥百货", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600729", + "name": "重庆百货", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600785", + "name": "新华百货", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "200058", + "name": "深赛格B", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002416", + "name": "爱施德", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603101", + "name": "汇嘉时代", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002818", + "name": "富森美", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002697", + "name": "红旗连锁", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601933", + "name": "永辉超市", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601010", + "name": "文峰股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601828", + "name": "美凯龙", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601366", + "name": "利群股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600827", + "name": "百联股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600814", + "name": "杭州解百", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000906", + "name": 
"浙商中拓", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600694", + "name": "大商股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600712", + "name": "南宁百货", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002187", + "name": "广百股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600859", + "name": "王府井", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600858", + "name": "银座股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600738", + "name": "丽尚国潮", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600415", + "name": "小商品城", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600778", + "name": "友好集团", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301078", + "name": "孩子王", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002344", + "name": "海宁皮城", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600287", + "name": "江苏舜天", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600865", + "name": "百大集团", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000564", + "name": "ST大集", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603214", + "name": "爱婴室", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601116", + "name": "三江购物", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600790", + "name": "轻纺城", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000419", + "name": "通程控股", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600682", + "name": "南京新百", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600857", + "name": "宁波中百", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002024", + "name": 
"ST易购", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300622", + "name": "博士眼镜", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000882", + "name": "华联股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600693", + "name": "东百集团", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000785", + "name": "居然之家", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002277", + "name": "友阿股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000701", + "name": "厦门信达", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603708", + "name": "家家悦", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002251", + "name": "*ST步高", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600774", + "name": "汉商集团", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603123", + "name": "翠微股份", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002336", + "name": "人人乐", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300126", + "name": "锐奇股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603960", + "name": "克来机电", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603901", + "name": "永创智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002214", + "name": "大立科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002690", + "name": "美亚光电", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002444", + "name": "巨星科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300002", + "name": "神州泰岳", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300124", + "name": "汇川技术", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000961", + 
"name": "中南建设", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000333", + "name": "美的集团", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300308", + "name": "中际旭创", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688218", + "name": "江苏北人", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600282", + "name": "南钢股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300802", + "name": "矩子科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002131", + "name": "利欧股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688155", + "name": "先惠技术", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002367", + "name": "康力电梯", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600522", + "name": "中天科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "003021", + "name": "兆威机电", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002599", + "name": "盛通股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002526", + "name": "山东矿机", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600346", + "name": "恒力石化", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600215", + "name": "派斯林", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603656", + "name": "泰禾智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600503", + "name": "华丽家族", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002067", + "name": "景兴纸业", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002747", + "name": "埃斯顿", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600835", + "name": "上海机电", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + 
"code": "300415", + "name": "伊之密", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603011", + "name": "合锻智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603025", + "name": "大豪科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300433", + "name": "蓝思科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300112", + "name": "万讯自控", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002892", + "name": "科力尔", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600894", + "name": "广日股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688255", + "name": "凯尔达", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300450", + "name": "先导智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600565", + "name": "迪马股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002600", + "name": "领益智造", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300420", + "name": "五洋停车", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300607", + "name": "拓斯达", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300532", + "name": "今天国际", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002903", + "name": "宇环数控", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600728", + "name": "佳都科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002472", + "name": "双环传动", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002957", + "name": "科瑞技术", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002229", + "name": "鸿博股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603486", + "name": "科沃斯", + "tag": "智能机器", + "hidden_tag": null, + 
"reason": "" + }, + { + "code": "000404", + "name": "长虹华意", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002097", + "name": "山河智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601138", + "name": "工业富联", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002011", + "name": "盾安环境", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002184", + "name": "海得控制", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300115", + "name": "长盈精密", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300193", + "name": "佳士科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000837", + "name": "秦川机床", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002547", + "name": "春兴精工", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002689", + "name": "远大智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605056", + "name": "咸亨国际", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002090", + "name": "金智科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000988", + "name": "华工科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000413", + "name": "东旭光电", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688003", + "name": "天准科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002722", + "name": "物产金轮", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300201", + "name": "海伦哲", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688025", + "name": "杰普特", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002535", + "name": "林州重机", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002917", + "name": "金奥博", + "tag": "智能机器", + 
"hidden_tag": null, + "reason": "" + }, + { + "code": "600699", + "name": "均胜电子", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300382", + "name": "斯莱克", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300475", + "name": "香农芯创", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002008", + "name": "大族激光", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300131", + "name": "英唐智控", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300097", + "name": "智云股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000584", + "name": "ST工智", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002583", + "name": "海能达", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300810", + "name": "中科海讯", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300293", + "name": "蓝英装备", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688165", + "name": "埃夫特-U", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002073", + "name": "软控股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300195", + "name": "长荣股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603508", + "name": "思维列控", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300173", + "name": "福能东方", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603416", + "name": "信捷电气", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603030", + "name": "*ST全筑", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002380", + "name": "科远智慧", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002625", + "name": "光启技术", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002026", + "name": 
"山东威达", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300044", + "name": "赛为智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002358", + "name": "森源电气", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300161", + "name": "华中数控", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300580", + "name": "贝斯特", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002139", + "name": "拓邦股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002441", + "name": "众业达", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688277", + "name": "天智航-U", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300486", + "name": "东杰智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002979", + "name": "雷赛智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688558", + "name": "国盛智科", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301112", + "name": "信邦智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002228", + "name": "合兴包装", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002209", + "name": "达 意 隆", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002348", + "name": "高乐股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300249", + "name": "依米康", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002497", + "name": "雅化集团", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000795", + "name": "英洛华", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "003019", + "name": "宸展光电", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002334", + "name": "英威腾", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": 
"300307", + "name": "慈星股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002698", + "name": "博实股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300134", + "name": "大富科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603496", + "name": "恒为科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002283", + "name": "天润工业", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600288", + "name": "大恒科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688090", + "name": "瑞松科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002363", + "name": "隆基机械", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300853", + "name": "申昊科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002009", + "name": "天奇股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300466", + "name": "赛摩智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603203", + "name": "快克智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300278", + "name": "华昌达", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002006", + "name": "精工科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688022", + "name": "瀚川智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "001696", + "name": "宗申动力", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002520", + "name": "日发精机", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600843", + "name": "上工申贝", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300461", + "name": "田中精机", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301199", + "name": "迈赫股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" 
+ }, + { + "code": "300154", + "name": "瑞凌股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300358", + "name": "楚天科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688290", + "name": "景业智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301298", + "name": "东利机械", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300222", + "name": "科大智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002846", + "name": "英联股份", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603895", + "name": "天永智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "835579", + "name": "机科股份", + "tag": "智能机器", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "600520", + "name": "文一科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300472", + "name": "新元科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300099", + "name": "尤洛卡", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300391", + "name": "长药控股", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688306", + "name": "均普智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300279", + "name": "和晶科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002376", + "name": "新北洋", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300543", + "name": "朗科智能", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300865", + "name": "大宏立", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300282", + "name": "*ST三盛", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002427", + "name": "尤夫股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "835237", + "name": "力佳科技", + "tag": "赛道", + "hidden_tag": "北交所", 
+ "reason": "" + }, + { + "code": "833523", + "name": "德瑞锂电", + "tag": "赛道", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "000616", + "name": "*ST海投", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "833781", + "name": "瑞奇智造", + "tag": "赛道", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "836239", + "name": "长虹能源", + "tag": "赛道", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "835185", + "name": "贝特瑞", + "tag": "赛道", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "873152", + "name": "天宏锂电", + "tag": "赛道", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "834033", + "name": "康普化学", + "tag": "赛道", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "830809", + "name": "安达科技", + "tag": "赛道", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "603032", + "name": "德新科技", + "tag": "智能机器", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002801", + "name": "微光股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300037", + "name": "新宙邦", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000651", + "name": "格力电器", + "tag": "大消费", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600039", + "name": "四川路桥", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300068", + "name": "南都电源", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603717", + "name": "天域生态", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601238", + "name": "广汽集团", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002601", + "name": "龙佰集团", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002741", + "name": "光华科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600673", + "name": "东阳光", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600426", + "name": "华鲁恒升", + "tag": "赛道", + "hidden_tag": null, + "reason": 
"" + }, + { + "code": "688819", + "name": "天能股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002371", + "name": "北方华创", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600143", + "name": "金发科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300750", + "name": "宁德时代", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000408", + "name": "藏格矿业", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600096", + "name": "云天化", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002594", + "name": "比亚迪", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300991", + "name": "创益通", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600160", + "name": "巨化股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000063", + "name": "中兴通讯", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603826", + "name": "坤彩科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603665", + "name": "康隆达", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600210", + "name": "紫江企业", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000792", + "name": "盐湖股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601678", + "name": "滨化股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002077", + "name": "大港股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603379", + "name": "三美股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600111", + "name": "北方稀土", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300207", + "name": "欣旺达", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002108", + "name": "沧州明珠", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": 
"300014", + "name": "亿纬锂能", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300801", + "name": "泰和科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600232", + "name": "金鹰股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600549", + "name": "厦门钨业", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000559", + "name": "万向钱潮", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688557", + "name": "兰剑智能", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600309", + "name": "万华化学", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301219", + "name": "腾远钴业", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002245", + "name": "蔚蓝锂芯", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300438", + "name": "鹏辉能源", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300497", + "name": "富祥药业", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000570", + "name": "苏常柴A", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600839", + "name": "四川长虹", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300587", + "name": "天铁股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002074", + "name": "国轩高科", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601222", + "name": "林洋能源", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688006", + "name": "杭可科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600516", + "name": "方大炭素", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688772", + "name": "珠海冠宇", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300487", + "name": "蓝晓科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300457", + "name": 
"赢合科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "001203", + "name": "大中矿业", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002812", + "name": "恩捷股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603799", + "name": "华友钴业", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605589", + "name": "圣泉集团", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002386", + "name": "天原股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300217", + "name": "东方电热", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688360", + "name": "德马科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601311", + "name": "骆驼股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300116", + "name": "保力新", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300019", + "name": "硅宝科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002709", + "name": "天赐材料", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603010", + "name": "万盛股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600525", + "name": "长园集团", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002460", + "name": "赣锋锂业", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603659", + "name": "璞泰来", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600955", + "name": "维远股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300510", + "name": "金冠股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002418", + "name": "康盛股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300035", + "name": "中科电气", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688599", + "name": "天合光能", + "tag": 
"赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300567", + "name": "精测电子", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300390", + "name": "天华新能", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300568", + "name": "星源材质", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603679", + "name": "华体科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002240", + "name": "盛新锂能", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600869", + "name": "远东股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603067", + "name": "振华股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300376", + "name": "易事特", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300041", + "name": "回天新材", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688516", + "name": "奥特维", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688559", + "name": "海目星", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601969", + "name": "海南矿业", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002045", + "name": "国光电器", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002091", + "name": "江苏国泰", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002121", + "name": "科陆电子", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002089", + "name": "*ST新海", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002734", + "name": "利民股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300545", + "name": "联得装备", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688518", + "name": "联赢激光", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002426", + "name": "胜利精密", + "tag": "赛道", + "hidden_tag": 
null, + "reason": "" + }, + { + "code": "002407", + "name": "多氟多", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600152", + "name": "维科技术", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002733", + "name": "雄韬股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603283", + "name": "赛腾股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000976", + "name": "ST华铁", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600773", + "name": "西藏城投", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002309", + "name": "ST中利", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002145", + "name": "中核钛白", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002340", + "name": "格林美", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300128", + "name": "锦富技术", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300080", + "name": "易成新能", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002850", + "name": "科达利", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002256", + "name": "兆新股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002052", + "name": "ST同洲", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688005", + "name": "容百科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002312", + "name": "川发龙蟒", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603196", + "name": "日播时尚", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688529", + "name": "豪森智能", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600711", + "name": "盛屯矿业", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603220", + "name": "中贝通信", + "tag": "AI", + "hidden_tag": null, + "reason": "" + 
}, + { + "code": "603303", + "name": "得邦照明", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002518", + "name": "科士达", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688499", + "name": "利元亨", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600731", + "name": "湖南海利", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603806", + "name": "福斯特", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600478", + "name": "科力远", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300538", + "name": "同益股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300082", + "name": "奥克股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601777", + "name": "力帆科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002759", + "name": "天际股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300919", + "name": "中伟股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002539", + "name": "云图控股", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300198", + "name": "纳川股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603399", + "name": "吉翔股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300428", + "name": "立中集团", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688045", + "name": "必易微", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688353", + "name": "华盛锂电", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300707", + "name": "威唐工业", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002298", + "name": "中电兴发", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688567", + "name": "孚能科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": 
"603823", + "name": "百合花", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600854", + "name": "春兰股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300062", + "name": "中能电气", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002877", + "name": "智能自控", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301121", + "name": "紫建电子", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600884", + "name": "杉杉股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000049", + "name": "德赛电池", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300565", + "name": "科信技术", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002125", + "name": "湘潭电化", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002591", + "name": "恒大高新", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300953", + "name": "震裕科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000545", + "name": "金浦钛业", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603617", + "name": "君禾股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002324", + "name": "普利特", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002068", + "name": "黑猫股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002598", + "name": "山东章鼓", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300409", + "name": "道氏技术", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301150", + "name": "中一科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002056", + "name": "横店东磁", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301086", + "name": "鸿富瀚", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688345", + "name": 
"博力威", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600110", + "name": "诺德股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002076", + "name": "星光股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002584", + "name": "西陇科学", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688116", + "name": "天奈科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002662", + "name": "京威股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "001217", + "name": "华尔泰", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688148", + "name": "芳源股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300018", + "name": "中元股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600067", + "name": "冠城大通", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300876", + "name": "蒙泰高新", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603906", + "name": "龙蟠科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300586", + "name": "美联新材", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002346", + "name": "柘中股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300501", + "name": "海顺新材", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300317", + "name": "珈伟新能", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300077", + "name": "国民技术", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300490", + "name": "华自科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301217", + "name": "铜冠铜箔", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300340", + "name": "科恒股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000636", + "name": "风华高科", + "tag": 
"赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300618", + "name": "寒锐钴业", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603928", + "name": "兴业股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300655", + "name": "晶瑞电材", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300432", + "name": "富临精工", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688598", + "name": "金博股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688560", + "name": "明冠新材", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300327", + "name": "中颖电子", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002617", + "name": "露笑科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600241", + "name": "时代万恒", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300647", + "name": "超频三", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300117", + "name": "嘉寓股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002213", + "name": "大为股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002859", + "name": "洁美科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300484", + "name": "蓝海华腾", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "832522", + "name": "纳科诺尔", + "tag": "赛道", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "002136", + "name": "安 纳 达", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300520", + "name": "科大国创", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603186", + "name": "华正新材", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300410", + "name": "正业科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300631", + "name": "久吾高科", + "tag": "赛道", + 
"hidden_tag": null, + "reason": "" + }, + { + "code": "300283", + "name": "温州宏丰", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300582", + "name": "英飞特", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000973", + "name": "佛塑科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300890", + "name": "翔丰华", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300985", + "name": "致远新能", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603618", + "name": "杭电股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600367", + "name": "红星发展", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600330", + "name": "天通股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300619", + "name": "金银河", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688733", + "name": "壹石通", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300585", + "name": "奥联电子", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688173", + "name": "希荻微", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688097", + "name": "博众精工", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300477", + "name": "合纵科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300980", + "name": "祥源新材", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "833284", + "name": "灵鸽科技", + "tag": "赛道", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "603335", + "name": "迪生力", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688388", + "name": "嘉元科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002580", + "name": "圣阳股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603088", + "name": "宁波精达", + "tag": "赛道", + "hidden_tag": null, + 
"reason": "" + }, + { + "code": "688325", + "name": "赛微微电", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688383", + "name": "新益昌", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300252", + "name": "金信诺", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002455", + "name": "百川股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688595", + "name": "芯海科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300648", + "name": "星云股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688181", + "name": "八亿时空", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688778", + "name": "厦钨新能", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002823", + "name": "凯中精密", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605376", + "name": "博迁新材", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300518", + "name": "新迅达", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002288", + "name": "超华科技", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601515", + "name": "东峰集团", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "831627", + "name": "力王股份", + "tag": "赛道", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "301180", + "name": "万祥科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002141", + "name": "贤丰控股", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603315", + "name": "福鞍股份", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300998", + "name": "宁波方正", + "tag": "赛道", + "hidden_tag": null, + "reason": "" + }, + { + "code": "900940", + "name": "大名城B", + "tag": "房地产", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "900903", + "name": "大众B股", + "tag": "房地产", + "hidden_tag": "北交所", + "reason": "" + 
}, + { + "code": "000671", + "name": "ST阳光城", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000048", + "name": "京基智农", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "900932", + "name": "陆家B股", + "tag": "房地产", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "900911", + "name": "金桥B股", + "tag": "房地产", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "900902", + "name": "市北B股", + "tag": "房地产", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "900912", + "name": "外高B股", + "tag": "房地产", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "900939", + "name": "汇丽B", + "tag": "房地产", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "900928", + "name": "临港B股", + "tag": "房地产", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "600239", + "name": "云南城投", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600683", + "name": "京投发展", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600692", + "name": "亚通股份", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "200029", + "name": "深深房B", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000090", + "name": "天健集团", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600848", + "name": "上海临港", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600325", + "name": "华发股份", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000897", + "name": "津滨发展", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600266", + "name": "城建发展", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600383", + "name": "金地集团", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002244", + "name": "滨江集团", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "200011", + "name": "深物业B", + "tag": "房地产", + "hidden_tag": null, + 
"reason": "" + }, + { + "code": "000002", + "name": "万 科A", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000006", + "name": "深振业A", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000011", + "name": "深物业A", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601155", + "name": "新城控股", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600823", + "name": "ST世茂", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600791", + "name": "京能置业", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000036", + "name": "华联控股", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600895", + "name": "张江高科", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600064", + "name": "南京高科", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600340", + "name": "华夏幸福", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600649", + "name": "城投控股", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600159", + "name": "大龙地产", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000517", + "name": "荣安地产", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600376", + "name": "首开股份", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000402", + "name": "金 融 街", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600736", + "name": "苏州高新", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600603", + "name": "广汇物流", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600638", + "name": "新黄浦", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600208", + "name": "新湖中宝", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000886", + "name": "海南高速", + "tag": "房地产", + "hidden_tag": null, + 
"reason": "" + }, + { + "code": "000631", + "name": "顺发恒业", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600807", + "name": "济南高新", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601512", + "name": "中新集团", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600748", + "name": "上实发展", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000863", + "name": "三湘印象", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600606", + "name": "绿地控股", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600663", + "name": "陆家嘴", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000042", + "name": "中洲控股", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000965", + "name": "天保基建", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600173", + "name": "卧龙地产", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000718", + "name": "苏宁环球", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000514", + "name": "渝 开 发", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600162", + "name": "香江控股", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600716", + "name": "凤凰股份", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002016", + "name": "世荣兆业", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600094", + "name": "大名城", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000809", + "name": "铁岭新城", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600322", + "name": "津投城开", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600743", + "name": "华远地产", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600082", + "name": "海泰发展", + "tag": "房地产", + "hidden_tag": null, + 
"reason": "" + }, + { + "code": "000609", + "name": "中迪投资", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600533", + "name": "栖霞建设", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600665", + "name": "天地源", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002133", + "name": "广宇集团", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000926", + "name": "福星股份", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002208", + "name": "合肥城建", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600510", + "name": "黑牡丹", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600185", + "name": "格力地产", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000620", + "name": "*ST新联", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000573", + "name": "粤宏远A", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "200056", + "name": "皇庭B", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000838", + "name": "财信发展", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600463", + "name": "空港股份", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600641", + "name": "万业企业", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600246", + "name": "万通发展", + "tag": "房地产", + "hidden_tag": null, + "reason": "" + }, + { + "code": "430300", + "name": "辰光医疗", + "tag": "医疗器械", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "832278", + "name": "鹿得医疗", + "tag": "医疗器械", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "870199", + "name": "倍益康", + "tag": "医疗器械", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "872925", + "name": "锦好医疗", + "tag": "医疗器械", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "603658", + "name": "安图生物", + "tag": "医疗器械", + "hidden_tag": 
null, + "reason": "" + }, + { + "code": "688301", + "name": "奕瑞科技", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688198", + "name": "佰仁医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688617", + "name": "惠泰医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300832", + "name": "新产业", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688050", + "name": "爱博医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688399", + "name": "硕世生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688289", + "name": "圣湘生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688366", + "name": "昊海生科", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688575", + "name": "亚辉龙", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002223", + "name": "鱼跃医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688016", + "name": "心脉医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688217", + "name": "睿昂基因", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688626", + "name": "翔宇医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301093", + "name": "华兰股份", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300677", + "name": "英科医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688298", + "name": "东方生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300760", + "name": "迈瑞医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002030", + "name": "达安基因", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300406", + "name": "九强生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688212", + "name": "澳华内镜", + "tag": 
"医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688075", + "name": "安旭生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605369", + "name": "拱东医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300633", + "name": "开立医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300676", + "name": "华大基因", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688767", + "name": "博拓生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300049", + "name": "福瑞股份", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600529", + "name": "山东药玻", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300529", + "name": "健帆生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688677", + "name": "海泰新光", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300003", + "name": "乐普医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688105", + "name": "诺唯赞", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688580", + "name": "伟思医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300685", + "name": "艾德生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300171", + "name": "东富龙", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301087", + "name": "可孚医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688139", + "name": "海尔生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002901", + "name": "大博医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002932", + "name": "明德生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603387", + "name": "基蛋生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603880", + 
"name": "ST南卫", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000710", + "name": "贝瑞基因", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300595", + "name": "欧普康视", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300463", + "name": "迈克生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300482", + "name": "万孚生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688317", + "name": "之江生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002022", + "name": "科华生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603987", + "name": "康德莱", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300439", + "name": "美康生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300642", + "name": "透景生命", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688389", + "name": "普门科技", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600587", + "name": "新华医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688606", + "name": "奥泰生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688338", + "name": "赛科希德", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688273", + "name": "麦澜德", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300314", + "name": "戴维医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688068", + "name": "热景生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300206", + "name": "理邦仪器", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300298", + "name": "三诺生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688161", + "name": "威高骨科", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + 
"code": "002950", + "name": "奥美医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300289", + "name": "利德曼", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300238", + "name": "冠昊生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603309", + "name": "维力医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688236", + "name": "春立医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688013", + "name": "天臣医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300318", + "name": "博晖创新", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603301", + "name": "振德医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300639", + "name": "凯普生物", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300981", + "name": "中红医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603976", + "name": "正川股份", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002382", + "name": "蓝帆医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300869", + "name": "康泰医学", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300326", + "name": "凯利泰", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300453", + "name": "三鑫医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002551", + "name": "尚荣医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301234", + "name": "五洲医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688656", + "name": "浩欧博", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688468", + "name": "科美诊断", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300653", + "name": "正海生物", + "tag": "医疗器械", + "hidden_tag": null, + 
"reason": "" + }, + { + "code": "301060", + "name": "兰卫医学", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002817", + "name": "黄山胶囊", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688108", + "name": "赛诺医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688085", + "name": "三友医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301122", + "name": "采纳股份", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600055", + "name": "万东医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300246", + "name": "宝莱特", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301097", + "name": "天益医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300753", + "name": "爱朋医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688067", + "name": "爱威科技", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300562", + "name": "乐心医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688314", + "name": "康拓医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688393", + "name": "安必平", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688358", + "name": "祥生医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688607", + "name": "康众医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300030", + "name": "阳普医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688613", + "name": "奥精医疗", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688029", + "name": "南微医学", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603122", + "name": "合富中国", + "tag": "医疗器械", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000668", + "name": "荣丰控股", + "tag": "医疗器械", + 
"hidden_tag": null, + "reason": "" + }, + { + "code": "836433", + "name": "大唐药业", + "tag": "医药", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "833266", + "name": "生物谷", + "tag": "医药", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "832566", + "name": "梓橦宫", + "tag": "医药", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "300039", + "name": "上海凯宝", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000538", + "name": "云南白药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600538", + "name": "国发股份", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600998", + "name": "九州通", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002424", + "name": "贵州百灵", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601607", + "name": "上海医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000513", + "name": "丽珠集团", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600518", + "name": "ST康美", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600436", + "name": "片仔癀", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600976", + "name": "健民集团", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002737", + "name": "葵花药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002198", + "name": "嘉应制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000623", + "name": "吉林敖东", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000950", + "name": "重药控股", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600566", + "name": "济川药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603883", + "name": "老百姓", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600993", + "name": "马应龙", + "tag": "医药", + "hidden_tag": null, + 
"reason": "" + }, + { + "code": "600085", + "name": "同仁堂", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002019", + "name": "亿帆医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600103", + "name": "青山纸业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600285", + "name": "羚锐制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000963", + "name": "华东医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603858", + "name": "步长制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002727", + "name": "一心堂", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600380", + "name": "健康元", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002793", + "name": "罗欣药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600572", + "name": "康恩贝", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000661", + "name": "长春高新", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002603", + "name": "以岭药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002275", + "name": "桂林三金", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603368", + "name": "柳药集团", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600479", + "name": "千金药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600276", + "name": "恒瑞医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600351", + "name": "亚宝药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002589", + "name": "瑞康医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000650", + "name": "仁和药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002435", + "name": "长江健康", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + 
"code": "603896", + "name": "寿仙谷", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301015", + "name": "百洋医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002773", + "name": "康弘药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300009", + "name": "安科生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300485", + "name": "赛升药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002020", + "name": "京新药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002349", + "name": "精华制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600422", + "name": "昆药集团", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600881", + "name": "亚泰集团", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002873", + "name": "新天药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000989", + "name": "九 芝 堂", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600211", + "name": "西藏药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000919", + "name": "金陵药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300086", + "name": "康芝药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600222", + "name": "太龙药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002287", + "name": "奇正藏药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000705", + "name": "浙江震元", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688658", + "name": "悦康药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600664", + "name": "哈药股份", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002219", + "name": "新里程", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300434", + 
"name": "金石亚药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600329", + "name": "达仁堂", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000153", + "name": "丰原药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002433", + "name": "*ST太安", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300026", + "name": "红日药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002317", + "name": "众生药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002390", + "name": "信邦制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "003020", + "name": "立方制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300158", + "name": "振东制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603567", + "name": "珍宝岛", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600671", + "name": "*ST目药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002412", + "name": "汉森制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600594", + "name": "益佰制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600535", + "name": "天士力", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688799", + "name": "华纳药厂", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002872", + "name": "ST天圣", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300267", + "name": "尔康制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002644", + "name": "佛慈制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603811", + "name": "诚意药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600829", + "name": "人民同泰", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600227", + "name": "赤天化", + 
"tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688566", + "name": "吉贝尔", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300683", + "name": "海特生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300878", + "name": "维康药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002566", + "name": "益盛药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600557", + "name": "康缘药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002864", + "name": "盘龙药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300430", + "name": "诚益通", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600613", + "name": "神奇制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301111", + "name": "粤万年青", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300404", + "name": "博济医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600668", + "name": "尖峰集团", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600223", + "name": "福瑞达", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300519", + "name": "新光药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600080", + "name": "金花股份", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002675", + "name": "东诚药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000790", + "name": "华神科技", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002107", + "name": "沃华医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000955", + "name": "欣龙控股", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605199", + "name": "葫芦娃", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300147", + "name": "香雪制药", + "tag": "医药", + 
"hidden_tag": null, + "reason": "" + }, + { + "code": "002907", + "name": "华森制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300006", + "name": "莱美药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603998", + "name": "方盛制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002082", + "name": "万邦德", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000931", + "name": "中 关 村", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603139", + "name": "康惠制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688329", + "name": "艾隆科技", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300254", + "name": "仟源医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300412", + "name": "迦南科技", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300016", + "name": "北陆药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002826", + "name": "易明医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603439", + "name": "贵州三力", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603963", + "name": "大理药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000908", + "name": "景峰医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "837403", + "name": "康农种业", + "tag": "医药", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "300108", + "name": "ST吉药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "430047", + "name": "诺思兰德", + "tag": "医药", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "833230", + "name": "欧康医药", + "tag": "医药", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "839729", + "name": "永顺生物", + "tag": "医药", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "837344", + "name": "三元基因", + "tag": "医药", + "hidden_tag": 
"北交所", + "reason": "" + }, + { + "code": "832982", + "name": "锦波生物", + "tag": "医药", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "833575", + "name": "康乐卫士", + "tag": "医药", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "688253", + "name": "英诺特", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002007", + "name": "华兰生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688488", + "name": "艾迪药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688278", + "name": "特宝生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688336", + "name": "三生国健", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002252", + "name": "上海莱士", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300122", + "name": "智飞生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688276", + "name": "百克生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000403", + "name": "派林生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688331", + "name": "荣昌生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688739", + "name": "成大生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603590", + "name": "康辰药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688520", + "name": "神州细胞-U", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688163", + "name": "赛伦生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688062", + "name": "迈威生物-U", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600867", + "name": "通化东宝", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002038", + "name": "双鹭药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300357", + "name": "我武生物", + "tag": "医药", + "hidden_tag": null, + 
"reason": "" + }, + { + "code": "688235", + "name": "百济神州-U", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300601", + "name": "康泰生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688180", + "name": "君实生物-U", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300142", + "name": "沃森生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301207", + "name": "华兰疫苗", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300204", + "name": "舒泰神", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002581", + "name": "未名医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603087", + "name": "甘李药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301047", + "name": "义翘神州", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000518", + "name": "四环生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688382", + "name": "益方生物-U", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600530", + "name": "ST交昂", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300841", + "name": "康华生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688197", + "name": "首药控股-U", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688176", + "name": "亚虹医药-U", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688193", + "name": "仁度生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002693", + "name": "双成药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600645", + "name": "中源协和", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688185", + "name": "康希诺", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688687", + "name": "凯因科技", + "tag": "医药", + "hidden_tag": null, + "reason": 
"" + }, + { + "code": "688373", + "name": "盟科药业-U", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688302", + "name": "海创药业-U", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688136", + "name": "科兴制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000534", + "name": "万泽股份", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300239", + "name": "东宝生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002880", + "name": "卫光生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688177", + "name": "百奥泰", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688319", + "name": "欧林生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688265", + "name": "南模生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688046", + "name": "药康生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002437", + "name": "誉衡药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300199", + "name": "翰宇药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300255", + "name": "常山药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002294", + "name": "信立泰", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300558", + "name": "贝达药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600267", + "name": "海正药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002422", + "name": "科伦药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600196", + "name": "复星医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000739", + "name": "普洛药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600216", + "name": "浙江医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + 
"code": "000597", + "name": "东北制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688578", + "name": "艾力斯", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002653", + "name": "海思科", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002755", + "name": "奥赛康", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002399", + "name": "海普瑞", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688189", + "name": "南新制药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002923", + "name": "润都股份", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688166", + "name": "博瑞医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300149", + "name": "睿智医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688321", + "name": "微芯生物", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688221", + "name": "前沿生物-U", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300725", + "name": "药石科技", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688131", + "name": "皓元医药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603127", + "name": "昭衍新药", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002898", + "name": "赛隆药业", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688356", + "name": "键凯科技", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300436", + "name": "广生堂", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002900", + "name": "哈三联", + "tag": "医药", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002173", + "name": "创新医疗", + "tag": "脑机", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300459", + "name": "汤姆猫", + "tag": "脑机", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002195", + 
"name": "岩山科技", + "tag": "脑机", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002602", + "name": "世纪华通", + "tag": "脑机", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300007", + "name": "汉威科技", + "tag": "脑机", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002362", + "name": "汉王科技", + "tag": "脑机", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300052", + "name": "中青宝", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300017", + "name": "网宿科技", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300442", + "name": "润泽科技", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000818", + "name": "航锦科技", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002042", + "name": "华孚时尚", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002575", + "name": "群兴玩具", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000977", + "name": "浪潮信息", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002902", + "name": "铭普光磁", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300166", + "name": "东方国信", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688327", + "name": "云从科技-UW", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603881", + "name": "数据港", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300339", + "name": "润和软件", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300113", + "name": "顺网科技", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688008", + "name": "澜起科技", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300383", + "name": "光环新网", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300657", + "name": "弘信电子", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002929", + "name": "润建股份", + 
"tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600589", + "name": "*ST榕泰", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600804", + "name": "ST鹏博士", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000925", + "name": "众合科技", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002822", + "name": "中装建设", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600666", + "name": "ST瑞德", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002875", + "name": "安奈儿", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300020", + "name": "银江技术", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603636", + "name": "南威软件", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002657", + "name": "中科金财", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "837092", + "name": "汉鑫科技", + "tag": "AI", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "300458", + "name": "全志科技", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300474", + "name": "景嘉微", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300738", + "name": "奥飞数据", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300344", + "name": "立方数科", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300846", + "name": "首都在线", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300798", + "name": "锦鸡股份", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002912", + "name": "中新赛克", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688047", + "name": "龙芯中科", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300571", + "name": "平治信息", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300826", + "name": "测绘股份", + "tag": "AI", + 
"hidden_tag": null, + "reason": "" + }, + { + "code": "301085", + "name": "亚康股份", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688135", + "name": "利扬芯片", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603629", + "name": "利通电子", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688589", + "name": "力合微", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688158", + "name": "优刻得-W", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300053", + "name": "航宇微", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300609", + "name": "汇纳科技", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688256", + "name": "寒武纪-U", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688049", + "name": "炬芯科技", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "839493", + "name": "并行科技", + "tag": "AI", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "300825", + "name": "阿尔特", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688787", + "name": "海天瑞声", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301308", + "name": "江波龙", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688099", + "name": "晶晨股份", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300373", + "name": "扬杰科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688120", + "name": "华海清科", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300661", + "name": "圣邦股份", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300042", + "name": "朗科科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688981", + "name": "中芯国际", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688332", + "name": "中科蓝讯", + "tag": "半导体", + 
"hidden_tag": null, + "reason": "" + }, + { + "code": "688728", + "name": "格科微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688766", + "name": "普冉股份", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688123", + "name": "聚辰股份", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002156", + "name": "通富微电", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688153", + "name": "唯捷创芯", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688130", + "name": "晶华微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688126", + "name": "沪硅产业", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688279", + "name": "峰岹科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688012", + "name": "中微公司", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600584", + "name": "长电科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300666", + "name": "江丰电子", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002079", + "name": "苏州固锝", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688072", + "name": "拓荆科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688002", + "name": "睿创微纳", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605588", + "name": "冠石科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300782", + "name": "卓胜微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002185", + "name": "华天科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688167", + "name": "炬光科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688082", + "name": "盛美上海", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603290", + "name": "斯达半导", + "tag": "半导体", + 
"hidden_tag": null, + "reason": "" + }, + { + "code": "688368", + "name": "晶丰明源", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603160", + "name": "汇顶科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603986", + "name": "兆易创新", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688200", + "name": "华峰测控", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603893", + "name": "瑞芯微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688052", + "name": "纳芯微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300139", + "name": "晓程科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600460", + "name": "士兰微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603501", + "name": "韦尔股份", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688385", + "name": "复旦微电", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300604", + "name": "长川科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688608", + "name": "恒玄科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000670", + "name": "盈方微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688536", + "name": "思瑞浦", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300493", + "name": "润欣科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300223", + "name": "北京君正", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688213", + "name": "思特威-W", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688037", + "name": "芯源微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301050", + "name": "雷电微力", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688521", + "name": "芯原股份", + "tag": "半导体", + 
"hidden_tag": null, + "reason": "" + }, + { + "code": "688661", + "name": "和林微纳", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605358", + "name": "立昂微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688261", + "name": "东微半导", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600360", + "name": "华微电子", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300613", + "name": "富瀚微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688209", + "name": "英集芯", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300672", + "name": "国科微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688107", + "name": "安路科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688259", + "name": "创耀科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688230", + "name": "芯导科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688380", + "name": "中微半导", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300046", + "name": "台基股份", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002119", + "name": "康强电子", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300706", + "name": "阿石创", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688286", + "name": "敏芯股份", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688220", + "name": "翱捷科技-U", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688138", + "name": "清溢光电", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301095", + "name": "广立微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688018", + "name": "乐鑫科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688270", + "name": "臻镭科技", + "tag": "半导体", + 
"hidden_tag": null, + "reason": "" + }, + { + "code": "001270", + "name": "铖昌科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688508", + "name": "芯朋微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688689", + "name": "银河微电", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300831", + "name": "派瑞股份", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603005", + "name": "晶方科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603068", + "name": "博通集成", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300671", + "name": "富满微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "003026", + "name": "中晶科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688711", + "name": "宏微科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688048", + "name": "长光华芯", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688110", + "name": "东芯股份", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688262", + "name": "国芯科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688699", + "name": "明微电子", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688601", + "name": "力芯微", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688798", + "name": "艾为电子", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688234", + "name": "天岳先进", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688233", + "name": "神工股份", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "001309", + "name": "德明利", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603933", + "name": "睿能科技", + "tag": "半导体", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688216", + "name": "气派科技", + "tag": "半导体", + 
"hidden_tag": null, + "reason": "" + }, + { + "code": "832876", + "name": "慧为智能", + "tag": "消费电子", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "603633", + "name": "徕木股份", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "831167", + "name": "鑫汇科", + "tag": "消费电子", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "003028", + "name": "振邦智能", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "001308", + "name": "康冠科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688036", + "name": "传音控股", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002881", + "name": "美格智能", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002681", + "name": "奋达科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002475", + "name": "立讯精密", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300735", + "name": "光弘科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301067", + "name": "显盈科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002925", + "name": "盈趣科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002937", + "name": "兴瑞科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "605277", + "name": "新亚电子", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300866", + "name": "安克创新", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300256", + "name": "星星科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002841", + "name": "视源股份", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300032", + "name": "金龙机电", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002351", + "name": "漫步者", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002993", + "name": 
"奥海科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300322", + "name": "硕贝德", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603133", + "name": "*ST碳元", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300679", + "name": "电连技术", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002402", + "name": "和而泰", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300136", + "name": "信维通信", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603380", + "name": "易德龙", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002782", + "name": "可立克", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600898", + "name": "ST美讯", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688260", + "name": "昀冢科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002861", + "name": "瀛通通讯", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301123", + "name": "奕东电子", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002866", + "name": "传艺科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688007", + "name": "光峰科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600745", + "name": "闻泰科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300686", + "name": "智动力", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603626", + "name": "科森科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600130", + "name": "波导股份", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002369", + "name": "卓翼科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300709", + "name": "精研科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": 
"002885", + "name": "京泉华", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300951", + "name": "博硕科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300787", + "name": "海能实业", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600203", + "name": "福日电子", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603327", + "name": "福蓉科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300684", + "name": "中石科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300976", + "name": "达瑞电子", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300916", + "name": "朗特智能", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301318", + "name": "维海德", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300857", + "name": "协创数据", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002947", + "name": "恒铭达", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688678", + "name": "福立旺", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688683", + "name": "莱尔科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301135", + "name": "瑞德智能", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "601231", + "name": "环旭电子", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300843", + "name": "胜蓝股份", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603595", + "name": "东尼电子", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300956", + "name": "英力股份", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300602", + "name": "飞荣达", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "872190", + "name": "雷神科技", + "tag": "消费电子", + "hidden_tag": "北交所", + "reason": "" + 
}, + { + "code": "002055", + "name": "得润电子", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603890", + "name": "春秋电子", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "833346", + "name": "威贸电子", + "tag": "消费电子", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "301182", + "name": "凯旺科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300822", + "name": "贝仕达克", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "001229", + "name": "魅视科技", + "tag": "消费电子", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300045", + "name": "华力创通", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300098", + "name": "高新兴", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002512", + "name": "达华智能", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002413", + "name": "雷科防务", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002023", + "name": "海特高新", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002669", + "name": "康达新材", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002829", + "name": "星网宇达", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300123", + "name": "亚光科技", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002446", + "name": "盛路通信", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300366", + "name": "创意信息", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002465", + "name": "海格通信", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300101", + "name": "振芯科技", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300903", + "name": "科翔股份", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688418", + "name": "震有科技", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { 
+ "code": "300447", + "name": "全信股份", + "tag": "卫星", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002624", + "name": "完美世界", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300624", + "name": "万兴科技", + "tag": "AI", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002558", + "name": "巨人网络", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002123", + "name": "梦网科技", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300059", + "name": "东方财富", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000560", + "name": "我爱我家", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "830799", + "name": "艾融软件", + "tag": "鸿蒙", + "hidden_tag": "北交所", + "reason": "" + }, + { + "code": "002970", + "name": "锐明技术", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002093", + "name": "国脉科技", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300033", + "name": "同花顺", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000829", + "name": "天音控股", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300608", + "name": "思特奇", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300542", + "name": "新晨科技", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300290", + "name": "荣科科技", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300496", + "name": "中科创达", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000997", + "name": "新 大 陆", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300352", + "name": "北信源", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000004", + "name": "国华网安", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000555", + "name": "神州信息", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300579", 
+ "name": "数字认证", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002261", + "name": "拓维信息", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300250", + "name": "初灵信息", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300036", + "name": "超图软件", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300598", + "name": "诚迈科技", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300333", + "name": "兆日科技", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301221", + "name": "光庭信息", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002869", + "name": "金溢科技", + "tag": "鸿蒙", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000893", + "name": "亚钾国际", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000902", + "name": "新洋丰", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002470", + "name": "金正大", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002274", + "name": "华昌化工", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002588", + "name": "史丹利", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600141", + "name": "兴发集团", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002999", + "name": "天禾股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002538", + "name": "司尔特", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600078", + "name": "ST澄星", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300387", + "name": "富邦股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002170", + "name": "芭田股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000912", + "name": "泸天化", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002556", + "name": "辉隆股份", + 
"tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600470", + "name": "六国化工", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600691", + "name": "阳煤化工", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603077", + "name": "和邦生物", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603979", + "name": "金诚信", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603993", + "name": "洛阳钼业", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002895", + "name": "川恒股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000713", + "name": "丰乐种业", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002237", + "name": "恒邦股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301118", + "name": "恒光股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300437", + "name": "清水源", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002942", + "name": "新农股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002409", + "name": "雅克科技", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600596", + "name": "新安股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600727", + "name": "鲁北化工", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688669", + "name": "聚石化学", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300610", + "name": "晨化股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688639", + "name": "华恒生物", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002001", + "name": "新 和 成", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600226", + "name": "瀚叶股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002648", + "name": "卫星化学", + "tag": "化工", + 
"hidden_tag": null, + "reason": "" + }, + { + "code": "002064", + "name": "华峰化学", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600352", + "name": "浙江龙盛", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600319", + "name": "亚星化学", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600425", + "name": "青松建化", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000683", + "name": "远兴能源", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301149", + "name": "隆华新材", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300834", + "name": "星辉环材", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301069", + "name": "凯盛新材", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300848", + "name": "美瑞新材", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603980", + "name": "吉华集团", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "000953", + "name": "河化股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600618", + "name": "氯碱化工", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300641", + "name": "正丹股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "688268", + "name": "华特气体", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600423", + "name": "柳化股份", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "301065", + "name": "本立科技", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "300886", + "name": "华业香料", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "600800", + "name": "渤海化学", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "002971", + "name": "和远气体", + "tag": "化工", + "hidden_tag": null, + "reason": "" + }, + { + "code": "603867", + "name": "新化股份", + "tag": "化工", + "hidden_tag": null, 
+ "reason": "" + }, + { + "code": "600651", + "name": "飞乐音响", + "tag": "VR", + "reason": "" + }, + { + "code": "600602", + "name": "云赛智联", + "tag": "AI", + "reason": "" + }, + { + "code": "600654", + "name": "ST中安", + "tag": "AI", + "reason": "" + }, + { + "code": "000005", + "name": "ST星源", + "tag": "公用", + "reason": "" + }, + { + "code": "000012", + "name": "南 玻A", + "tag": "化工", + "reason": "" + }, + { + "code": "600604", + "name": "市北高新", + "tag": "房地产", + "reason": "" + }, + { + "code": "600608", + "name": "ST沪科", + "tag": "大消费", + "reason": "" + }, + { + "code": "000007", + "name": "*ST全新", + "tag": "房地产", + "reason": "" + }, + { + "code": "000020", + "name": "深华发A", + "tag": "VR", + "reason": "" + }, + { + "code": "600605", + "name": "汇通能源", + "tag": "房地产", + "reason": "" + }, + { + "code": "600609", + "name": "金杯汽车", + "tag": "汽车", + "reason": "" + }, + { + "code": "600611", + "name": "大众交通", + "tag": "公用", + "reason": "" + }, + { + "code": "600612", + "name": "老凤祥", + "tag": "大消费", + "reason": "" + }, + { + "code": "600616", + "name": "金枫酒业", + "tag": "大消费", + "reason": "" + }, + { + "code": "000019", + "name": "深粮控股", + "tag": "大消费", + "reason": "" + }, + { + "code": "600619", + "name": "海立股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "600620", + "name": "天宸股份", + "tag": "公用", + "reason": "" + }, + { + "code": "600623", + "name": "华谊集团", + "tag": "化工", + "reason": "" + }, + { + "code": "000504", + "name": "南华生物", + "tag": "医药", + "reason": "" + }, + { + "code": "600624", + "name": "复旦复华", + "tag": "创投", + "reason": "" + }, + { + "code": "600626", + "name": "申达股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "600633", + "name": "浙数文化", + "tag": "AI", + "reason": "" + }, + { + "code": "000506", + "name": "中润资源", + "tag": "资源", + "reason": "" + }, + { + "code": "000507", + "name": "珠海港", + "tag": "公用", + "reason": "" + }, + { + "code": "000023", + "name": "ST深天", + "tag": "房地产", + "reason": "" + }, + { + "code": "000510", + "name": "新金路", + 
"tag": "化工", + "reason": "" + }, + { + "code": "000509", + "name": "华塑控股", + "tag": "VR", + "reason": "" + }, + { + "code": "600658", + "name": "电子城", + "tag": "房地产", + "reason": "" + }, + { + "code": "600650", + "name": "锦江在线", + "tag": "公用", + "reason": "" + }, + { + "code": "600660", + "name": "福耀玻璃", + "tag": "化工", + "reason": "" + }, + { + "code": "600661", + "name": "昂立教育", + "tag": "教育", + "reason": "" + }, + { + "code": "600662", + "name": "外服控股", + "tag": "公用", + "reason": "" + }, + { + "code": "600667", + "name": "太极实业", + "tag": "房地产", + "reason": "" + }, + { + "code": "000516", + "name": "国际医学", + "tag": "医药", + "reason": "" + }, + { + "code": "600600", + "name": "青岛啤酒", + "tag": "大消费", + "reason": "" + }, + { + "code": "600676", + "name": "交运股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "000030", + "name": "富奥股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "000520", + "name": "凤凰航运", + "tag": "公用", + "reason": "" + }, + { + "code": "000525", + "name": "ST红太阳", + "tag": "农业", + "reason": "" + }, + { + "code": "000526", + "name": "学大教育", + "tag": "教育", + "reason": "" + }, + { + "code": "600686", + "name": "金龙汽车", + "tag": "汽车", + "reason": "" + }, + { + "code": "000524", + "name": "岭南控股", + "tag": "大消费", + "reason": "" + }, + { + "code": "000528", + "name": "柳 工", + "tag": "房地产", + "reason": "" + }, + { + "code": "600690", + "name": "海尔智家", + "tag": "大消费", + "reason": "" + }, + { + "code": "000541", + "name": "佛山照明", + "tag": "大消费", + "reason": "" + }, + { + "code": "000536", + "name": "华映科技", + "tag": "VR", + "reason": "" + }, + { + "code": "000530", + "name": "冰山冷热", + "tag": "智能机器", + "reason": "" + }, + { + "code": "000544", + "name": "中原环保", + "tag": "公用", + "reason": "" + }, + { + "code": "000548", + "name": "湖南投资", + "tag": "公用", + "reason": "" + }, + { + "code": "600802", + "name": "福建水泥", + "tag": "房地产", + "reason": "" + }, + { + "code": "600801", + "name": "华新水泥", + "tag": "房地产", + "reason": "" + }, + { + "code": "000533", + 
"name": "顺钠股份", + "tag": "电力", + "reason": "" + }, + { + "code": "600811", + "name": "东方集团", + "tag": "农业", + "reason": "" + }, + { + "code": "600810", + "name": "神马股份", + "tag": "化工", + "reason": "" + }, + { + "code": "000551", + "name": "创元科技", + "tag": "公用", + "reason": "" + }, + { + "code": "000563", + "name": "陕国投A", + "tag": "金融", + "reason": "" + }, + { + "code": "600812", + "name": "华北制药", + "tag": "医药", + "reason": "" + }, + { + "code": "600815", + "name": "厦工股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "600817", + "name": "宇通重工", + "tag": "房地产", + "reason": "" + }, + { + "code": "600820", + "name": "隧道股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "600818", + "name": "中路股份", + "tag": "公用", + "reason": "" + }, + { + "code": "600826", + "name": "兰生股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "600822", + "name": "上海物贸", + "tag": "大消费", + "reason": "" + }, + { + "code": "600833", + "name": "第一医药", + "tag": "医药", + "reason": "" + }, + { + "code": "600834", + "name": "申通地铁", + "tag": "公用", + "reason": "" + }, + { + "code": "600836", + "name": "上海易连", + "tag": "化工", + "reason": "" + }, + { + "code": "600846", + "name": "同济科技", + "tag": "房地产", + "reason": "" + }, + { + "code": "600844", + "name": "丹化科技", + "tag": "化工", + "reason": "" + }, + { + "code": "600841", + "name": "动力新科", + "tag": "汽车", + "reason": "" + }, + { + "code": "600847", + "name": "万里股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "600851", + "name": "海欣股份", + "tag": "医药", + "reason": "" + }, + { + "code": "600853", + "name": "龙建股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "000565", + "name": "渝三峡A", + "tag": "化工", + "reason": "" + }, + { + "code": "000035", + "name": "中国天楹", + "tag": "公用", + "reason": "" + }, + { + "code": "000039", + "name": "中集集团", + "tag": "智能机器", + "reason": "" + }, + { + "code": "000034", + "name": "神州数码", + "tag": "AI", + "reason": "" + }, + { + "code": "000561", + "name": "烽火电子", + "tag": "AI", + "reason": "" + }, + { + "code": 
"000567", + "name": "海德股份", + "tag": "金融", + "reason": "" + }, + { + "code": "000557", + "name": "西部创业", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "000572", + "name": "海马汽车", + "tag": "汽车", + "reason": "" + }, + { + "code": "000045", + "name": "深纺织A", + "tag": "VR", + "reason": "" + }, + { + "code": "000576", + "name": "甘化科工", + "tag": "化工", + "reason": "" + }, + { + "code": "000046", + "name": "*ST泛海", + "tag": "金融", + "reason": "" + }, + { + "code": "600874", + "name": "创业环保", + "tag": "公用", + "reason": "" + }, + { + "code": "000582", + "name": "北部湾港", + "tag": "公用", + "reason": "" + }, + { + "code": "600880", + "name": "博瑞传播", + "tag": "教育", + "reason": "" + }, + { + "code": "600883", + "name": "博闻科技", + "tag": "房地产", + "reason": "" + }, + { + "code": "000586", + "name": "汇源通信", + "tag": "AI", + "reason": "" + }, + { + "code": "600885", + "name": "宏发股份", + "tag": "电力", + "reason": "" + }, + { + "code": "600889", + "name": "南京化纤", + "tag": "化工", + "reason": "" + }, + { + "code": "000589", + "name": "贵州轮胎", + "tag": "化工", + "reason": "" + }, + { + "code": "000055", + "name": "方大集团", + "tag": "房地产", + "reason": "" + }, + { + "code": "000595", + "name": "宝塔实业", + "tag": "智能机器", + "reason": "" + }, + { + "code": "000599", + "name": "青岛双星", + "tag": "化工", + "reason": "" + }, + { + "code": "600707", + "name": "彩虹股份", + "tag": "VR", + "reason": "" + }, + { + "code": "600703", + "name": "三安光电", + "tag": "VR", + "reason": "" + }, + { + "code": "000598", + "name": "兴蓉环境", + "tag": "公用", + "reason": "" + }, + { + "code": "600897", + "name": "厦门空港", + "tag": "公用", + "reason": "" + }, + { + "code": "600714", + "name": "金瑞矿业", + "tag": "化工", + "reason": "" + }, + { + "code": "600704", + "name": "物产中大", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "600717", + "name": "天津港", + "tag": "公用", + "reason": "" + }, + { + "code": "000401", + "name": "冀东水泥", + "tag": "房地产", + "reason": "" + }, + { + "code": "600718", + "name": "东软集团", + "tag": "AI", + "reason": "" + }, 
+ { + "code": "600722", + "name": "金牛化工", + "tag": "化工", + "reason": "" + }, + { + "code": "600721", + "name": "百花医药", + "tag": "医药", + "reason": "" + }, + { + "code": "000409", + "name": "云鼎科技", + "tag": "AI", + "reason": "" + }, + { + "code": "600715", + "name": "文投控股", + "tag": "AI", + "reason": "" + }, + { + "code": "600713", + "name": "南京医药", + "tag": "医药", + "reason": "" + }, + { + "code": "000056", + "name": "皇庭国际", + "tag": "房地产", + "reason": "" + }, + { + "code": "600724", + "name": "宁波富达", + "tag": "房地产", + "reason": "" + }, + { + "code": "000411", + "name": "英特集团", + "tag": "医药", + "reason": "" + }, + { + "code": "000415", + "name": "渤海租赁", + "tag": "金融", + "reason": "" + }, + { + "code": "000416", + "name": "*ST民控", + "tag": "金融", + "reason": "" + }, + { + "code": "600735", + "name": "新华锦", + "tag": "大消费", + "reason": "" + }, + { + "code": "000420", + "name": "吉林化纤", + "tag": "化工", + "reason": "" + }, + { + "code": "600734", + "name": "ST实达", + "tag": "AI", + "reason": "" + }, + { + "code": "600733", + "name": "北汽蓝谷", + "tag": "汽车", + "reason": "" + }, + { + "code": "600739", + "name": "辽宁成大", + "tag": "大消费", + "reason": "" + }, + { + "code": "600741", + "name": "华域汽车", + "tag": "汽车", + "reason": "" + }, + { + "code": "600742", + "name": "一汽富维", + "tag": "汽车", + "reason": "" + }, + { + "code": "000425", + "name": "徐工机械", + "tag": "房地产", + "reason": "" + }, + { + "code": "000430", + "name": "张家界", + "tag": "大消费", + "reason": "" + }, + { + "code": "600751", + "name": "海航科技", + "tag": "公用", + "reason": "" + }, + { + "code": "000605", + "name": "渤海股份", + "tag": "公用", + "reason": "" + }, + { + "code": "600746", + "name": "江苏索普", + "tag": "化工", + "reason": "" + }, + { + "code": "600756", + "name": "浪潮软件", + "tag": "AI", + "reason": "" + }, + { + "code": "600753", + "name": "庚星股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "600755", + "name": "厦门国贸", + "tag": "大消费", + "reason": "" + }, + { + "code": "600761", + "name": "安徽合力", + "tag": "房地产", + 
"reason": "" + }, + { + "code": "600754", + "name": "锦江酒店", + "tag": "大消费", + "reason": "" + }, + { + "code": "600749", + "name": "西藏旅游", + "tag": "大消费", + "reason": "" + }, + { + "code": "000615", + "name": "*ST美谷", + "tag": "大消费", + "reason": "" + }, + { + "code": "000619", + "name": "海螺新材", + "tag": "房地产", + "reason": "" + }, + { + "code": "600766", + "name": "*ST园城", + "tag": "房地产", + "reason": "" + }, + { + "code": "600763", + "name": "通策医疗", + "tag": "医药", + "reason": "" + }, + { + "code": "600769", + "name": "祥龙电业", + "tag": "房地产", + "reason": "" + }, + { + "code": "000622", + "name": "恒立实业", + "tag": "汽车", + "reason": "" + }, + { + "code": "600770", + "name": "综艺股份", + "tag": "传媒", + "reason": "" + }, + { + "code": "000632", + "name": "三木集团", + "tag": "化工", + "reason": "" + }, + { + "code": "000638", + "name": "万方发展", + "tag": "农业", + "reason": "" + }, + { + "code": "000652", + "name": "泰达股份", + "tag": "公用", + "reason": "" + }, + { + "code": "000655", + "name": "金岭矿业", + "tag": "房地产", + "reason": "" + }, + { + "code": "000626", + "name": "远大控股", + "tag": "大消费", + "reason": "" + }, + { + "code": "000659", + "name": "珠海中富", + "tag": "化工", + "reason": "" + }, + { + "code": "000672", + "name": "上峰水泥", + "tag": "房地产", + "reason": "" + }, + { + "code": "600783", + "name": "鲁信创投", + "tag": "化工", + "reason": "" + }, + { + "code": "600784", + "name": "鲁银投资", + "tag": "创投", + "reason": "" + }, + { + "code": "600782", + "name": "新钢股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "000677", + "name": "恒天海龙", + "tag": "化工", + "reason": "" + }, + { + "code": "000678", + "name": "襄阳轴承", + "tag": "汽车", + "reason": "" + }, + { + "code": "000061", + "name": "农 产 品", + "tag": "大消费", + "reason": "" + }, + { + "code": "000682", + "name": "东方电子", + "tag": "电力", + "reason": "" + }, + { + "code": "000680", + "name": "山推股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "000685", + "name": "中山公用", + "tag": "公用", + "reason": "" + }, + { + "code": "000062", + "name": "深圳华强", 
+ "tag": "消费电子", + "reason": "" + }, + { + "code": "600793", + "name": "宜宾纸业", + "tag": "大消费", + "reason": "" + }, + { + "code": "600789", + "name": "鲁抗医药", + "tag": "医药", + "reason": "" + }, + { + "code": "000692", + "name": "*ST惠天", + "tag": "公用", + "reason": "" + }, + { + "code": "000700", + "name": "模塑科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "600794", + "name": "保税科技", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "000702", + "name": "正虹科技", + "tag": "农业", + "reason": "" + }, + { + "code": "000697", + "name": "*ST炼石", + "tag": "军工", + "reason": "" + }, + { + "code": "000703", + "name": "恒逸石化", + "tag": "化工", + "reason": "" + }, + { + "code": "600796", + "name": "钱江生化", + "tag": "农业", + "reason": "" + }, + { + "code": "600051", + "name": "宁波联合", + "tag": "房地产", + "reason": "" + }, + { + "code": "000711", + "name": "*ST京蓝", + "tag": "公用", + "reason": "" + }, + { + "code": "000707", + "name": "双环科技", + "tag": "化工", + "reason": "" + }, + { + "code": "000709", + "name": "河钢股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "600797", + "name": "浙大网新", + "tag": "AI", + "reason": "" + }, + { + "code": "600060", + "name": "海信视像", + "tag": "大消费", + "reason": "" + }, + { + "code": "600798", + "name": "宁波海运", + "tag": "公用", + "reason": "" + }, + { + "code": "000721", + "name": "西安饮食", + "tag": "大消费", + "reason": "" + }, + { + "code": "600054", + "name": "黄山旅游", + "tag": "大消费", + "reason": "" + }, + { + "code": "600066", + "name": "宇通客车", + "tag": "汽车", + "reason": "" + }, + { + "code": "600076", + "name": "康欣新材", + "tag": "房地产", + "reason": "" + }, + { + "code": "600063", + "name": "皖维高新", + "tag": "化工", + "reason": "" + }, + { + "code": "000779", + "name": "甘咨询", + "tag": "房地产", + "reason": "" + }, + { + "code": "600057", + "name": "厦门象屿", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "600070", + "name": "ST富润", + "tag": "AI", + "reason": "" + }, + { + "code": "600079", + "name": "人福医药", + "tag": "医药", + "reason": "" + }, + { + "code": 
"600083", + "name": "博信股份", + "tag": "AI", + "reason": "" + }, + { + "code": "000498", + "name": "山东路桥", + "tag": "房地产", + "reason": "" + }, + { + "code": "000735", + "name": "罗 牛 山", + "tag": "农业", + "reason": "" + }, + { + "code": "600075", + "name": "新疆天业", + "tag": "化工", + "reason": "" + }, + { + "code": "000828", + "name": "东莞控股", + "tag": "公用", + "reason": "" + }, + { + "code": "600089", + "name": "特变电工", + "tag": "电力", + "reason": "" + }, + { + "code": "600097", + "name": "开创国际", + "tag": "农业", + "reason": "" + }, + { + "code": "000782", + "name": "美达股份", + "tag": "化工", + "reason": "" + }, + { + "code": "000752", + "name": "*ST西发", + "tag": "大消费", + "reason": "" + }, + { + "code": "000753", + "name": "漳州发展", + "tag": "汽车", + "reason": "" + }, + { + "code": "000755", + "name": "山西路桥", + "tag": "公用", + "reason": "" + }, + { + "code": "000757", + "name": "浩物股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "000796", + "name": "*ST凯撒", + "tag": "大消费", + "reason": "" + }, + { + "code": "000729", + "name": "燕京啤酒", + "tag": "大消费", + "reason": "" + }, + { + "code": "000868", + "name": "安凯客车", + "tag": "汽车", + "reason": "" + }, + { + "code": "000088", + "name": "盐 田 港", + "tag": "公用", + "reason": "" + }, + { + "code": "000756", + "name": "新华制药", + "tag": "医药", + "reason": "" + }, + { + "code": "600108", + "name": "亚盛集团", + "tag": "农业", + "reason": "" + }, + { + "code": "000816", + "name": "智慧农业", + "tag": "智能机器", + "reason": "" + }, + { + "code": "600128", + "name": "苏豪弘业", + "tag": "大消费", + "reason": "" + }, + { + "code": "000789", + "name": "万年青", + "tag": "房地产", + "reason": "" + }, + { + "code": "600105", + "name": "永鼎股份", + "tag": "AI", + "reason": "" + }, + { + "code": "000836", + "name": "富通信息", + "tag": "AI", + "reason": "" + }, + { + "code": "000823", + "name": "超声电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "600117", + "name": "*ST西钢", + "tag": "房地产", + "reason": "" + }, + { + "code": "600113", + "name": "浙江东日", + "tag": "农业", + "reason": "" + 
}, + { + "code": "600132", + "name": "重庆啤酒", + "tag": "大消费", + "reason": "" + }, + { + "code": "600107", + "name": "美尔雅", + "tag": "大消费", + "reason": "" + }, + { + "code": "600104", + "name": "上汽集团", + "tag": "汽车", + "reason": "" + }, + { + "code": "600112", + "name": "ST天成", + "tag": "电力", + "reason": "" + }, + { + "code": "600120", + "name": "浙江东方", + "tag": "金融", + "reason": "" + }, + { + "code": "000889", + "name": "ST中嘉", + "tag": "AI", + "reason": "" + }, + { + "code": "600133", + "name": "东湖高新", + "tag": "房地产", + "reason": "" + }, + { + "code": "600009", + "name": "上海机场", + "tag": "公用", + "reason": "" + }, + { + "code": "000429", + "name": "粤高速A", + "tag": "公用", + "reason": "" + }, + { + "code": "000803", + "name": "山高环能", + "tag": "公用", + "reason": "" + }, + { + "code": "600126", + "name": "杭钢股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "000876", + "name": "新 希 望", + "tag": "农业", + "reason": "" + }, + { + "code": "000880", + "name": "潍柴重机", + "tag": "智能机器", + "reason": "" + }, + { + "code": "600137", + "name": "浪莎股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "000089", + "name": "深圳机场", + "tag": "公用", + "reason": "" + }, + { + "code": "600168", + "name": "武汉控股", + "tag": "公用", + "reason": "" + }, + { + "code": "600127", + "name": "金健米业", + "tag": "农业", + "reason": "" + }, + { + "code": "000801", + "name": "四川九洲", + "tag": "大消费", + "reason": "" + }, + { + "code": "000813", + "name": "德展健康", + "tag": "医药", + "reason": "" + }, + { + "code": "000811", + "name": "冰轮环境", + "tag": "智能机器", + "reason": "" + }, + { + "code": "600165", + "name": "宁科生物", + "tag": "化工", + "reason": "" + }, + { + "code": "600166", + "name": "福田汽车", + "tag": "汽车", + "reason": "" + }, + { + "code": "600170", + "name": "上海建工", + "tag": "房地产", + "reason": "" + }, + { + "code": "000812", + "name": "陕西金叶", + "tag": "化工", + "reason": "" + }, + { + "code": "600180", + "name": "瑞茂通", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "000822", + "name": "山东海化", + "tag": "化工", + 
"reason": "" + }, + { + "code": "000856", + "name": "冀东装备", + "tag": "智能机器", + "reason": "" + }, + { + "code": "600169", + "name": "太原重工", + "tag": "智能机器", + "reason": "" + }, + { + "code": "000581", + "name": "威孚高科", + "tag": "汽车", + "reason": "" + }, + { + "code": "600183", + "name": "生益科技", + "tag": "消费电子", + "reason": "" + }, + { + "code": "600179", + "name": "安通控股", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "600187", + "name": "国中水务", + "tag": "公用", + "reason": "" + }, + { + "code": "000833", + "name": "粤桂股份", + "tag": "公用", + "reason": "" + }, + { + "code": "600177", + "name": "雅戈尔", + "tag": "大消费", + "reason": "" + }, + { + "code": "000859", + "name": "国风新材", + "tag": "化工", + "reason": "" + }, + { + "code": "600172", + "name": "黄河旋风", + "tag": "化工", + "reason": "" + }, + { + "code": "600218", + "name": "全柴动力", + "tag": "汽车", + "reason": "" + }, + { + "code": "000887", + "name": "中鼎股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "600192", + "name": "长城电工", + "tag": "电力", + "reason": "" + }, + { + "code": "600201", + "name": "生物股份", + "tag": "农业", + "reason": "" + }, + { + "code": "000890", + "name": "法尔胜", + "tag": "智能机器", + "reason": "" + }, + { + "code": "000900", + "name": "现代投资", + "tag": "公用", + "reason": "" + }, + { + "code": "000885", + "name": "城发环境", + "tag": "公用", + "reason": "" + }, + { + "code": "600200", + "name": "江苏吴中", + "tag": "医药", + "reason": "" + }, + { + "code": "000903", + "name": "云内动力", + "tag": "汽车", + "reason": "" + }, + { + "code": "600359", + "name": "新农开发", + "tag": "农业", + "reason": "" + }, + { + "code": "000905", + "name": "厦门港务", + "tag": "公用", + "reason": "" + }, + { + "code": "600182", + "name": "S佳通", + "tag": "化工", + "reason": "" + }, + { + "code": "000909", + "name": "ST数源", + "tag": "AI", + "reason": "" + }, + { + "code": "000913", + "name": "钱江摩托", + "tag": "公用", + "reason": "" + }, + { + "code": "600193", + "name": "创兴资源", + "tag": "房地产", + "reason": "" + }, + { + "code": "600202", + "name": "哈空调", + 
"tag": "赛道", + "reason": "" + }, + { + "code": "600190", + "name": "锦州港", + "tag": "公用", + "reason": "" + }, + { + "code": "000915", + "name": "华特达因", + "tag": "医药", + "reason": "" + }, + { + "code": "000929", + "name": "兰州黄河", + "tag": "大消费", + "reason": "" + }, + { + "code": "000910", + "name": "大亚圣象", + "tag": "房地产", + "reason": "" + }, + { + "code": "000921", + "name": "海信家电", + "tag": "大消费", + "reason": "" + }, + { + "code": "000923", + "name": "河钢资源", + "tag": "房地产", + "reason": "" + }, + { + "code": "000932", + "name": "华菱钢铁", + "tag": "房地产", + "reason": "" + }, + { + "code": "000935", + "name": "四川双马", + "tag": "房地产", + "reason": "" + }, + { + "code": "600213", + "name": "亚星客车", + "tag": "汽车", + "reason": "" + }, + { + "code": "600220", + "name": "江苏阳光", + "tag": "大消费", + "reason": "" + }, + { + "code": "000948", + "name": "南天信息", + "tag": "AI", + "reason": "" + }, + { + "code": "000949", + "name": "新乡化纤", + "tag": "化工", + "reason": "" + }, + { + "code": "000952", + "name": "广济药业", + "tag": "医药", + "reason": "" + }, + { + "code": "600221", + "name": "海航控股", + "tag": "公用", + "reason": "" + }, + { + "code": "000959", + "name": "首钢股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "000957", + "name": "中通客车", + "tag": "汽车", + "reason": "" + }, + { + "code": "600225", + "name": "卓朗科技", + "tag": "AI", + "reason": "" + }, + { + "code": "000960", + "name": "锡业股份", + "tag": "资源", + "reason": "" + }, + { + "code": "000967", + "name": "盈峰环境", + "tag": "公用", + "reason": "" + }, + { + "code": "600008", + "name": "首创环保", + "tag": "公用", + "reason": "" + }, + { + "code": "000971", + "name": "ST高升", + "tag": "AI", + "reason": "" + }, + { + "code": "600231", + "name": "凌钢股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "600269", + "name": "赣粤高速", + "tag": "公用", + "reason": "" + }, + { + "code": "600259", + "name": "广晟有色", + "tag": "资源", + "reason": "" + }, + { + "code": "000301", + "name": "东方盛虹", + "tag": "化工", + "reason": "" + }, + { + "code": "600258", + "name": 
"首旅酒店", + "tag": "大消费", + "reason": "" + }, + { + "code": "600233", + "name": "圆通速递", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "000975", + "name": "银泰黄金", + "tag": "资源", + "reason": "" + }, + { + "code": "600237", + "name": "铜峰电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "600235", + "name": "民丰特纸", + "tag": "大消费", + "reason": "" + }, + { + "code": "000980", + "name": "众泰汽车", + "tag": "汽车", + "reason": "" + }, + { + "code": "600248", + "name": "陕建股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "000981", + "name": "山子股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "000990", + "name": "诚志股份", + "tag": "化工", + "reason": "" + }, + { + "code": "600301", + "name": "华锡有色", + "tag": "大消费", + "reason": "" + }, + { + "code": "600278", + "name": "东方创业", + "tag": "大消费", + "reason": "" + }, + { + "code": "000987", + "name": "越秀资本", + "tag": "金融", + "reason": "" + }, + { + "code": "000996", + "name": "*ST中期", + "tag": "汽车", + "reason": "" + }, + { + "code": "600289", + "name": "ST信通", + "tag": "AI", + "reason": "" + }, + { + "code": "600261", + "name": "阳光照明", + "tag": "大消费", + "reason": "" + }, + { + "code": "600277", + "name": "亿利洁能", + "tag": "化工", + "reason": "" + }, + { + "code": "600279", + "name": "重庆港", + "tag": "公用", + "reason": "" + }, + { + "code": "600265", + "name": "ST景谷", + "tag": "农业", + "reason": "" + }, + { + "code": "600293", + "name": "三峡新材", + "tag": "AI", + "reason": "" + }, + { + "code": "600308", + "name": "华泰股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "000157", + "name": "中联重科", + "tag": "房地产", + "reason": "" + }, + { + "code": "000869", + "name": "张 裕A", + "tag": "大消费", + "reason": "" + }, + { + "code": "600366", + "name": "宁波韵升", + "tag": "资源", + "reason": "" + }, + { + "code": "600290", + "name": "*ST华仪", + "tag": "电力", + "reason": "" + }, + { + "code": "600281", + "name": "华阳新材", + "tag": "资源", + "reason": "" + }, + { + "code": "600297", + "name": "广汇汽车", + "tag": "汽车", + "reason": "" + }, + { + "code": 
"600243", + "name": "青海华鼎", + "tag": "智能机器", + "reason": "" + }, + { + "code": "000488", + "name": "晨鸣纸业", + "tag": "大消费", + "reason": "" + }, + { + "code": "600337", + "name": "美克家居", + "tag": "房地产", + "reason": "" + }, + { + "code": "600302", + "name": "标准股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "600307", + "name": "酒钢宏兴", + "tag": "房地产", + "reason": "" + }, + { + "code": "600368", + "name": "五洲交通", + "tag": "公用", + "reason": "" + }, + { + "code": "600323", + "name": "瀚蓝环境", + "tag": "公用", + "reason": "" + }, + { + "code": "000726", + "name": "鲁 泰A", + "tag": "大消费", + "reason": "" + }, + { + "code": "600303", + "name": "ST曙光", + "tag": "汽车", + "reason": "" + }, + { + "code": "600398", + "name": "海澜之家", + "tag": "大消费", + "reason": "" + }, + { + "code": "600399", + "name": "抚顺特钢", + "tag": "房地产", + "reason": "" + }, + { + "code": "600388", + "name": "龙净环保", + "tag": "公用", + "reason": "" + }, + { + "code": "600336", + "name": "澳柯玛", + "tag": "大消费", + "reason": "" + }, + { + "code": "000725", + "name": "京东方A", + "tag": "VR", + "reason": "" + }, + { + "code": "600326", + "name": "西藏天路", + "tag": "房地产", + "reason": "" + }, + { + "code": "600377", + "name": "宁沪高速", + "tag": "公用", + "reason": "" + }, + { + "code": "600033", + "name": "福建高速", + "tag": "公用", + "reason": "" + }, + { + "code": "600386", + "name": "北巴传媒", + "tag": "汽车", + "reason": "" + }, + { + "code": "600558", + "name": "大西洋", + "tag": "智能机器", + "reason": "" + }, + { + "code": "600010", + "name": "包钢股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "600315", + "name": "上海家化", + "tag": "大消费", + "reason": "" + }, + { + "code": "600363", + "name": "联创光电", + "tag": "VR", + "reason": "" + }, + { + "code": "600356", + "name": "恒丰纸业", + "tag": "大消费", + "reason": "" + }, + { + "code": "600295", + "name": "鄂尔多斯", + "tag": "房地产", + "reason": "" + }, + { + "code": "600588", + "name": "用友网络", + "tag": "AI", + "reason": "" + }, + { + "code": "600568", + "name": "ST中珠", + "tag": null, + "reason": "" 
+ }, + { + "code": "600321", + "name": "正源股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "600468", + "name": "百利电气", + "tag": "电力", + "reason": "" + }, + { + "code": "600488", + "name": "津药药业", + "tag": "医药", + "reason": "" + }, + { + "code": "600569", + "name": "安阳钢铁", + "tag": "房地产", + "reason": "" + }, + { + "code": "600539", + "name": "狮头股份", + "tag": "公用", + "reason": "" + }, + { + "code": "600418", + "name": "江淮汽车", + "tag": "汽车", + "reason": "" + }, + { + "code": "600599", + "name": "ST熊猫", + "tag": "金融", + "reason": "" + }, + { + "code": "600567", + "name": "山鹰国际", + "tag": "大消费", + "reason": "" + }, + { + "code": "600548", + "name": "深高速", + "tag": "公用", + "reason": "" + }, + { + "code": "600585", + "name": "海螺水泥", + "tag": "房地产", + "reason": "" + }, + { + "code": "600350", + "name": "山东高速", + "tag": "公用", + "reason": "" + }, + { + "code": "600456", + "name": "宝钛股份", + "tag": "资源", + "reason": "" + }, + { + "code": "600496", + "name": "精工钢构", + "tag": "房地产", + "reason": "" + }, + { + "code": "600580", + "name": "卧龙电驱", + "tag": "智能机器", + "reason": "" + }, + { + "code": "600355", + "name": "精伦电子", + "tag": "AI", + "reason": "" + }, + { + "code": "600327", + "name": "大东方", + "tag": "医药", + "reason": "" + }, + { + "code": "600590", + "name": "泰豪科技", + "tag": "电力", + "reason": "" + }, + { + "code": "600561", + "name": "江西长运", + "tag": "公用", + "reason": "" + }, + { + "code": "600416", + "name": "湘电股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "600526", + "name": "菲达环保", + "tag": "公用", + "reason": "" + }, + { + "code": "600592", + "name": "龙溪股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "600515", + "name": "海南机场", + "tag": "公用", + "reason": "" + }, + { + "code": "600577", + "name": "精达股份", + "tag": "电力", + "reason": "" + }, + { + "code": "600371", + "name": "万向德农", + "tag": "农业", + "reason": "" + }, + { + "code": "600571", + "name": "信雅达", + "tag": "AI", + "reason": "" + }, + { + "code": "600353", + "name": "旭光电子", + "tag": "消费电子", + 
"reason": "" + }, + { + "code": "600563", + "name": "法拉电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "600512", + "name": "腾达建设", + "tag": "房地产", + "reason": "" + }, + { + "code": "600012", + "name": "皖通高速", + "tag": "公用", + "reason": "" + }, + { + "code": "600537", + "name": "亿晶光电", + "tag": "赛道", + "reason": "" + }, + { + "code": "600573", + "name": "惠泉啤酒", + "tag": "大消费", + "reason": "" + }, + { + "code": "600521", + "name": "华海药业", + "tag": "医药", + "reason": "" + }, + { + "code": "600370", + "name": "三房巷", + "tag": "化工", + "reason": "" + }, + { + "code": "600513", + "name": "联环药业", + "tag": "医药", + "reason": "" + }, + { + "code": "600575", + "name": "淮河能源", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "600375", + "name": "汉马科技", + "tag": "房地产", + "reason": "" + }, + { + "code": "600502", + "name": "安徽建工", + "tag": "房地产", + "reason": "" + }, + { + "code": "600481", + "name": "双良节能", + "tag": "赛道", + "reason": "" + }, + { + "code": "600004", + "name": "白云机场", + "tag": "公用", + "reason": "" + }, + { + "code": "600459", + "name": "贵研铂业", + "tag": "资源", + "reason": "" + }, + { + "code": "600251", + "name": "冠农股份", + "tag": "农业", + "reason": "" + }, + { + "code": "600273", + "name": "嘉化能源", + "tag": "化工", + "reason": "" + }, + { + "code": "600031", + "name": "三一重工", + "tag": "房地产", + "reason": "" + }, + { + "code": "600475", + "name": "华光环能", + "tag": "赛道", + "reason": "" + }, + { + "code": "600020", + "name": "中原高速", + "tag": "公用", + "reason": "" + }, + { + "code": "600487", + "name": "亨通光电", + "tag": "AI", + "reason": "" + }, + { + "code": "600547", + "name": "山东黄金", + "tag": "资源", + "reason": "" + }, + { + "code": "600462", + "name": "ST九有", + "tag": "AI", + "reason": "" + }, + { + "code": "600507", + "name": "方大特钢", + "tag": "房地产", + "reason": "" + }, + { + "code": "600477", + "name": "杭萧钢构", + "tag": "房地产", + "reason": "" + }, + { + "code": "600527", + "name": "江南高纤", + "tag": "化工", + "reason": "" + }, + { + "code": "600545", + "name": "卓郎智能", + 
"tag": "智能机器", + "reason": "" + }, + { + "code": "600570", + "name": "恒生电子", + "tag": "AI", + "reason": "" + }, + { + "code": "600540", + "name": "新赛股份", + "tag": "农业", + "reason": "" + }, + { + "code": "600249", + "name": "两面针", + "tag": "大消费", + "reason": "" + }, + { + "code": "000100", + "name": "TCL科技", + "tag": "VR", + "reason": "" + }, + { + "code": "600438", + "name": "通威股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "600035", + "name": "楚天高速", + "tag": "公用", + "reason": "" + }, + { + "code": "600543", + "name": "*ST莫高", + "tag": "大消费", + "reason": "" + }, + { + "code": "600455", + "name": "博通股份", + "tag": "教育", + "reason": "" + }, + { + "code": "600405", + "name": "动力源", + "tag": "赛道", + "reason": "" + }, + { + "code": "600960", + "name": "渤海汽车", + "tag": "汽车", + "reason": "" + }, + { + "code": "600988", + "name": "赤峰黄金", + "tag": "资源", + "reason": "" + }, + { + "code": "600410", + "name": "华胜天成", + "tag": "AI", + "reason": "" + }, + { + "code": "600114", + "name": "东睦股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "600992", + "name": "贵绳股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "600491", + "name": "龙元建设", + "tag": "房地产", + "reason": "" + }, + { + "code": "600461", + "name": "洪城环境", + "tag": "公用", + "reason": "" + }, + { + "code": "600421", + "name": "华嵘控股", + "tag": "智能机器", + "reason": "" + }, + { + "code": "600966", + "name": "博汇纸业", + "tag": "大消费", + "reason": "" + }, + { + "code": "600975", + "name": "新五丰", + "tag": "农业", + "reason": "" + }, + { + "code": "002003", + "name": "伟星股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002004", + "name": "华邦健康", + "tag": "农业", + "reason": "" + }, + { + "code": "002005", + "name": "ST德豪", + "tag": "大消费", + "reason": "" + }, + { + "code": "002002", + "name": "ST鸿达", + "tag": "化工", + "reason": "" + }, + { + "code": "600022", + "name": "山东钢铁", + "tag": "房地产", + "reason": "" + }, + { + "code": "002010", + "name": "传化智联", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "600981", 
+ "name": "汇鸿集团", + "tag": "大消费", + "reason": "" + }, + { + "code": "600984", + "name": "建设机械", + "tag": "房地产", + "reason": "" + }, + { + "code": "002014", + "name": "永新股份", + "tag": "化工", + "reason": "" + }, + { + "code": "600965", + "name": "福成股份", + "tag": "农业", + "reason": "" + }, + { + "code": "002021", + "name": "*ST中捷", + "tag": "智能机器", + "reason": "" + }, + { + "code": "600983", + "name": "惠而浦", + "tag": "大消费", + "reason": "" + }, + { + "code": "002028", + "name": "思源电气", + "tag": "电力", + "reason": "" + }, + { + "code": "002029", + "name": "七 匹 狼", + "tag": "大消费", + "reason": "" + }, + { + "code": "600987", + "name": "航民股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002032", + "name": "苏 泊 尔", + "tag": "大消费", + "reason": "" + }, + { + "code": "002034", + "name": "旺能环境", + "tag": "公用", + "reason": "" + }, + { + "code": "002035", + "name": "华帝股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002036", + "name": "联创电子", + "tag": "VR", + "reason": "" + }, + { + "code": "002040", + "name": "南 京 港", + "tag": "公用", + "reason": "" + }, + { + "code": "002041", + "name": "登海种业", + "tag": "农业", + "reason": "" + }, + { + "code": "002043", + "name": "兔 宝 宝", + "tag": "房地产", + "reason": "" + }, + { + "code": "002044", + "name": "美年健康", + "tag": "医药", + "reason": "" + }, + { + "code": "002047", + "name": "宝鹰股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "002048", + "name": "宁波华翔", + "tag": "汽车", + "reason": "" + }, + { + "code": "002050", + "name": "三花智控", + "tag": "大消费", + "reason": "" + }, + { + "code": "002053", + "name": "云南能投", + "tag": "化工", + "reason": "" + }, + { + "code": "002054", + "name": "德美化工", + "tag": "化工", + "reason": "" + }, + { + "code": "002058", + "name": "威尔泰", + "tag": "电力", + "reason": "" + }, + { + "code": "002060", + "name": "粤 水 电", + "tag": "房地产", + "reason": "" + }, + { + "code": "002062", + "name": "宏润建设", + "tag": "房地产", + "reason": "" + }, + { + "code": "002061", + "name": "浙江交科", + "tag": "房地产", + "reason": "" + }, + 
{ + "code": "002065", + "name": "东华软件", + "tag": "AI", + "reason": "" + }, + { + "code": "002069", + "name": "獐子岛", + "tag": "农业", + "reason": "" + }, + { + "code": "601588", + "name": "北辰实业", + "tag": "房地产", + "reason": "" + }, + { + "code": "600017", + "name": "日照港", + "tag": "公用", + "reason": "" + }, + { + "code": "002072", + "name": "凯瑞德", + "tag": "AI", + "reason": "" + }, + { + "code": "002075", + "name": "沙钢股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "600018", + "name": "上港集团", + "tag": "公用", + "reason": "" + }, + { + "code": "002078", + "name": "太阳纸业", + "tag": "大消费", + "reason": "" + }, + { + "code": "002081", + "name": "金 螳 螂", + "tag": "房地产", + "reason": "" + }, + { + "code": "002084", + "name": "海鸥住工", + "tag": "房地产", + "reason": "" + }, + { + "code": "002083", + "name": "孚日股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002086", + "name": "*ST东洋", + "tag": "农业", + "reason": "" + }, + { + "code": "002085", + "name": "万丰奥威", + "tag": "汽车", + "reason": "" + }, + { + "code": "002087", + "name": "*ST新纺", + "tag": "大消费", + "reason": "" + }, + { + "code": "002088", + "name": "鲁阳节能", + "tag": "房地产", + "reason": "" + }, + { + "code": "002092", + "name": "中泰化学", + "tag": "化工", + "reason": "" + }, + { + "code": "002094", + "name": "青岛金王", + "tag": "大消费", + "reason": "" + }, + { + "code": "002095", + "name": "生 意 宝", + "tag": "AI", + "reason": "" + }, + { + "code": "002098", + "name": "浔兴股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002096", + "name": "易普力", + "tag": "化工", + "reason": "" + }, + { + "code": "002099", + "name": "海翔药业", + "tag": "医药", + "reason": "" + }, + { + "code": "002100", + "name": "天康生物", + "tag": "农业", + "reason": "" + }, + { + "code": "002101", + "name": "广东鸿图", + "tag": "汽车", + "reason": "" + }, + { + "code": "002102", + "name": "冠福股份", + "tag": "医药", + "reason": "" + }, + { + "code": "002104", + "name": "恒宝股份", + "tag": "AI", + "reason": "" + }, + { + "code": "002105", + "name": "信隆健康", + "tag": "公用", + 
"reason": "" + }, + { + "code": "601002", + "name": "晋亿实业", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002111", + "name": "威海广泰", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002110", + "name": "三钢闽光", + "tag": "房地产", + "reason": "" + }, + { + "code": "002109", + "name": "兴化股份", + "tag": "化工", + "reason": "" + }, + { + "code": "002112", + "name": "三变科技", + "tag": "电力", + "reason": "" + }, + { + "code": "601003", + "name": "柳钢股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "002117", + "name": "东港股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002120", + "name": "韵达股份", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "002122", + "name": "汇洲智能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002124", + "name": "天邦食品", + "tag": "农业", + "reason": "" + }, + { + "code": "601007", + "name": "金陵饭店", + "tag": "大消费", + "reason": "" + }, + { + "code": "002127", + "name": "南极电商", + "tag": "大消费", + "reason": "" + }, + { + "code": "002126", + "name": "银轮股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "002130", + "name": "沃尔核材", + "tag": "化工", + "reason": "" + }, + { + "code": "601008", + "name": "连云港", + "tag": "公用", + "reason": "" + }, + { + "code": "000338", + "name": "潍柴动力", + "tag": "汽车", + "reason": "" + }, + { + "code": "002134", + "name": "天津普林", + "tag": "消费电子", + "reason": "" + }, + { + "code": "002135", + "name": "东南网架", + "tag": "房地产", + "reason": "" + }, + { + "code": "002137", + "name": "实益达", + "tag": "消费电子", + "reason": "" + }, + { + "code": "002138", + "name": "顺络电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "002144", + "name": "宏达高科", + "tag": "大消费", + "reason": "" + }, + { + "code": "002146", + "name": "荣盛发展", + "tag": "房地产", + "reason": "" + }, + { + "code": "002148", + "name": "北纬科技", + "tag": "AI", + "reason": "" + }, + { + "code": "002150", + "name": "通润装备", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002149", + "name": "西部材料", + "tag": "资源", + "reason": "" + }, + { + "code": "002153", + "name": 
"石基信息", + "tag": "AI", + "reason": "" + }, + { + "code": "002151", + "name": "北斗星通", + "tag": "AI", + "reason": "" + }, + { + "code": "002152", + "name": "广电运通", + "tag": "AI", + "reason": "" + }, + { + "code": "002154", + "name": "报 喜 鸟", + "tag": "大消费", + "reason": "" + }, + { + "code": "002155", + "name": "湖南黄金", + "tag": "资源", + "reason": "" + }, + { + "code": "002159", + "name": "三特索道", + "tag": "大消费", + "reason": "" + }, + { + "code": "002157", + "name": "*ST 正邦", + "tag": "农业", + "reason": "" + }, + { + "code": "002158", + "name": "汉钟精机", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002161", + "name": "远 望 谷", + "tag": "消费电子", + "reason": "" + }, + { + "code": "002163", + "name": "海南发展", + "tag": "化工", + "reason": "" + }, + { + "code": "002164", + "name": "宁波东力", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002166", + "name": "莱茵生物", + "tag": "医药", + "reason": "" + }, + { + "code": "002167", + "name": "东方锆业", + "tag": "资源", + "reason": "" + }, + { + "code": "002165", + "name": "红 宝 丽", + "tag": "化工", + "reason": "" + }, + { + "code": "002169", + "name": "智光电气", + "tag": "电力", + "reason": "" + }, + { + "code": "002175", + "name": "东方智造", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002180", + "name": "纳思达", + "tag": "AI", + "reason": "" + }, + { + "code": "002188", + "name": "中天服务", + "tag": "房地产", + "reason": "" + }, + { + "code": "002191", + "name": "劲嘉股份", + "tag": "化工", + "reason": "" + }, + { + "code": "002194", + "name": "武汉凡谷", + "tag": "AI", + "reason": "" + }, + { + "code": "002193", + "name": "如意集团", + "tag": "大消费", + "reason": "" + }, + { + "code": "002196", + "name": "方正电机", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002199", + "name": "东晶电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "002200", + "name": "ST交投", + "tag": "农业", + "reason": "" + }, + { + "code": "002202", + "name": "金风科技", + "tag": "赛道", + "reason": "" + }, + { + "code": "002201", + "name": "正威新材", + "tag": "化工", + "reason": "" + }, + { + 
"code": "002204", + "name": "大连重工", + "tag": "房地产", + "reason": "" + }, + { + "code": "002206", + "name": "海 利 得", + "tag": "化工", + "reason": "" + }, + { + "code": "002207", + "name": "准油股份", + "tag": "公用", + "reason": "" + }, + { + "code": "002210", + "name": "飞马国际", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "002212", + "name": "天融信", + "tag": "AI", + "reason": "" + }, + { + "code": "002215", + "name": "诺 普 信", + "tag": "农业", + "reason": "" + }, + { + "code": "002217", + "name": "合力泰", + "tag": "VR", + "reason": "" + }, + { + "code": "002218", + "name": "拓日新能", + "tag": "赛道", + "reason": "" + }, + { + "code": "601958", + "name": "金钼股份", + "tag": "资源", + "reason": "" + }, + { + "code": "601899", + "name": "紫金矿业", + "tag": "资源", + "reason": "" + }, + { + "code": "002224", + "name": "三 力 士", + "tag": "化工", + "reason": "" + }, + { + "code": "002225", + "name": "濮耐股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "002227", + "name": "奥 特 迅", + "tag": "赛道", + "reason": "" + }, + { + "code": "002231", + "name": "奥维通信", + "tag": "AI", + "reason": "" + }, + { + "code": "002234", + "name": "民和股份", + "tag": "农业", + "reason": "" + }, + { + "code": "002233", + "name": "塔牌集团", + "tag": "房地产", + "reason": "" + }, + { + "code": "002236", + "name": "大华股份", + "tag": "AI", + "reason": "" + }, + { + "code": "002239", + "name": "奥特佳", + "tag": "汽车", + "reason": "" + }, + { + "code": "002242", + "name": "九阳股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002243", + "name": "力合科创", + "tag": "大消费", + "reason": "" + }, + { + "code": "002248", + "name": "华东数控", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002247", + "name": "聚力文化", + "tag": "房地产", + "reason": "" + }, + { + "code": "002250", + "name": "联化科技", + "tag": "农业", + "reason": "" + }, + { + "code": "002249", + "name": "大洋电机", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002253", + "name": "川大智胜", + "tag": "AI", + "reason": "" + }, + { + "code": "002254", + "name": "泰和新材", + "tag": "化工", + 
"reason": "" + }, + { + "code": "002262", + "name": "恩华药业", + "tag": "医药", + "reason": "" + }, + { + "code": "002263", + "name": "大东南", + "tag": "化工", + "reason": "" + }, + { + "code": "002264", + "name": "新 华 都", + "tag": "AI", + "reason": "" + }, + { + "code": "002266", + "name": "浙富控股", + "tag": "公用", + "reason": "" + }, + { + "code": "002270", + "name": "华明装备", + "tag": "电力", + "reason": "" + }, + { + "code": "002271", + "name": "东方雨虹", + "tag": "房地产", + "reason": "" + }, + { + "code": "002273", + "name": "水晶光电", + "tag": "VR", + "reason": "" + }, + { + "code": "002272", + "name": "川润股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "601727", + "name": "上海电气", + "tag": "赛道", + "reason": "" + }, + { + "code": "002276", + "name": "万马股份", + "tag": "电力", + "reason": "" + }, + { + "code": "601107", + "name": "四川成渝", + "tag": "公用", + "reason": "" + }, + { + "code": "002279", + "name": "久其软件", + "tag": "AI", + "reason": "" + }, + { + "code": "002278", + "name": "神开股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002282", + "name": "博深股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002280", + "name": "联络互动", + "tag": "AI", + "reason": "" + }, + { + "code": "002285", + "name": "世联行", + "tag": "房地产", + "reason": "" + }, + { + "code": "002284", + "name": "亚太股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "002289", + "name": "ST宇顺", + "tag": "VR", + "reason": "" + }, + { + "code": "002290", + "name": "禾盛新材", + "tag": "大消费", + "reason": "" + }, + { + "code": "002291", + "name": "遥望科技", + "tag": "大消费", + "reason": "" + }, + { + "code": "002293", + "name": "罗莱生活", + "tag": "大消费", + "reason": "" + }, + { + "code": "002296", + "name": "辉煌科技", + "tag": "AI", + "reason": "" + }, + { + "code": "002297", + "name": "博云新材", + "tag": "军工", + "reason": "" + }, + { + "code": "002299", + "name": "圣农发展", + "tag": "农业", + "reason": "" + }, + { + "code": "002301", + "name": "齐心集团", + "tag": "大消费", + "reason": "" + }, + { + "code": "002300", + "name": "太阳电缆", + 
"tag": "电力", + "reason": "" + }, + { + "code": "300012", + "name": "华测检测", + "tag": "专业服务", + "reason": "" + }, + { + "code": "300010", + "name": "*ST豆神", + "tag": "教育", + "reason": "" + }, + { + "code": "300015", + "name": "爱尔眼科", + "tag": "医药", + "reason": "" + }, + { + "code": "300013", + "name": "新宁物流", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "300011", + "name": "鼎汉技术", + "tag": "公用", + "reason": "" + }, + { + "code": "300025", + "name": "华星创业", + "tag": "AI", + "reason": "" + }, + { + "code": "300021", + "name": "大禹节水", + "tag": "农业", + "reason": "" + }, + { + "code": "300008", + "name": "天海防务", + "tag": "公用", + "reason": "" + }, + { + "code": "300001", + "name": "特锐德", + "tag": "电力", + "reason": "" + }, + { + "code": "300005", + "name": "探路者", + "tag": "大消费", + "reason": "" + }, + { + "code": "300022", + "name": "吉峰科技", + "tag": "房地产", + "reason": "" + }, + { + "code": "300004", + "name": "南风股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002303", + "name": "美盈森", + "tag": "化工", + "reason": "" + }, + { + "code": "002307", + "name": "北新路桥", + "tag": "房地产", + "reason": "" + }, + { + "code": "002308", + "name": "威创股份", + "tag": "AI", + "reason": "" + }, + { + "code": "002311", + "name": "海大集团", + "tag": "农业", + "reason": "" + }, + { + "code": "002310", + "name": "东方园林", + "tag": "公用", + "reason": "" + }, + { + "code": "002313", + "name": "日海智能", + "tag": "AI", + "reason": "" + }, + { + "code": "002315", + "name": "焦点科技", + "tag": "AI", + "reason": "" + }, + { + "code": "002316", + "name": "亚联发展", + "tag": "AI", + "reason": "" + }, + { + "code": "002319", + "name": "乐通股份", + "tag": "化工", + "reason": "" + }, + { + "code": "002318", + "name": "久立特材", + "tag": "房地产", + "reason": "" + }, + { + "code": "002321", + "name": "华英农业", + "tag": "农业", + "reason": "" + }, + { + "code": "002322", + "name": "理工能科", + "tag": "AI", + "reason": "" + }, + { + "code": "002325", + "name": "洪涛股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "300031", + 
"name": "宝通科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300029", + "name": "ST天龙", + "tag": "赛道", + "reason": "" + }, + { + "code": "002327", + "name": "富安娜", + "tag": "大消费", + "reason": "" + }, + { + "code": "002328", + "name": "新朋股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "002331", + "name": "皖通科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300040", + "name": "九洲集团", + "tag": "电力", + "reason": "" + }, + { + "code": "002332", + "name": "仙琚制药", + "tag": "医药", + "reason": "" + }, + { + "code": "002335", + "name": "科华数据", + "tag": "赛道", + "reason": "" + }, + { + "code": "300050", + "name": "世纪鼎利", + "tag": "AI", + "reason": "" + }, + { + "code": "300043", + "name": "星辉娱乐", + "tag": "AI", + "reason": "" + }, + { + "code": "300047", + "name": "天源迪科", + "tag": "AI", + "reason": "" + }, + { + "code": "300048", + "name": "合康新能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "601877", + "name": "正泰电器", + "tag": "电力", + "reason": "" + }, + { + "code": "002339", + "name": "积成电子", + "tag": "电力", + "reason": "" + }, + { + "code": "002345", + "name": "潮宏基", + "tag": "大消费", + "reason": "" + }, + { + "code": "002350", + "name": "北京科锐", + "tag": "电力", + "reason": "" + }, + { + "code": "002352", + "name": "顺丰控股", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "002353", + "name": "杰瑞股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002354", + "name": "天娱数科", + "tag": "AI", + "reason": "" + }, + { + "code": "002355", + "name": "兴民智通", + "tag": "汽车", + "reason": "" + }, + { + "code": "300051", + "name": "三五互联", + "tag": "AI", + "reason": "" + }, + { + "code": "300054", + "name": "鼎龙股份", + "tag": "半导体", + "reason": "" + }, + { + "code": "300056", + "name": "中创环保", + "tag": "公用", + "reason": "" + }, + { + "code": "300055", + "name": "万邦达", + "tag": "公用", + "reason": "" + }, + { + "code": "002360", + "name": "同德化工", + "tag": "化工", + "reason": "" + }, + { + "code": "002361", + "name": "神剑股份", + "tag": "化工", + "reason": "" + }, + { + "code": 
"002365", + "name": "永安药业", + "tag": "医药", + "reason": "" + }, + { + "code": "002364", + "name": "中恒电气", + "tag": "赛道", + "reason": "" + }, + { + "code": "002366", + "name": "融发核电", + "tag": "赛道", + "reason": "" + }, + { + "code": "002370", + "name": "亚太药业", + "tag": "医药", + "reason": "" + }, + { + "code": "002372", + "name": "伟星新材", + "tag": "房地产", + "reason": "" + }, + { + "code": "002373", + "name": "千方科技", + "tag": "AI", + "reason": "" + }, + { + "code": "601188", + "name": "龙江交通", + "tag": "公用", + "reason": "" + }, + { + "code": "601518", + "name": "吉林高速", + "tag": "公用", + "reason": "" + }, + { + "code": "002375", + "name": "亚厦股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "300066", + "name": "三川智慧", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300065", + "name": "海兰信", + "tag": "公用", + "reason": "" + }, + { + "code": "601158", + "name": "重庆水务", + "tag": "公用", + "reason": "" + }, + { + "code": "002378", + "name": "章源钨业", + "tag": "资源", + "reason": "" + }, + { + "code": "002383", + "name": "合众思壮", + "tag": "AI", + "reason": "" + }, + { + "code": "002384", + "name": "东山精密", + "tag": "消费电子", + "reason": "" + }, + { + "code": "002385", + "name": "大北农", + "tag": "农业", + "reason": "" + }, + { + "code": "002387", + "name": "维信诺", + "tag": "VR", + "reason": "" + }, + { + "code": "002388", + "name": "新亚制程", + "tag": "消费电子", + "reason": "" + }, + { + "code": "002391", + "name": "长青股份", + "tag": "农业", + "reason": "" + }, + { + "code": "300067", + "name": "安诺其", + "tag": "化工", + "reason": "" + }, + { + "code": "300069", + "name": "金利华电", + "tag": "电力", + "reason": "" + }, + { + "code": "002393", + "name": "力生制药", + "tag": "医药", + "reason": "" + }, + { + "code": "002394", + "name": "联发股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002392", + "name": "北京利尔", + "tag": "房地产", + "reason": "" + }, + { + "code": "300075", + "name": "数字政通", + "tag": "AI", + "reason": "" + }, + { + "code": "300074", + "name": "华平股份", + "tag": "AI", + "reason": "" + }, + { + 
"code": "300072", + "name": "海新能科", + "tag": "化工", + "reason": "" + }, + { + "code": "601369", + "name": "陕鼓动力", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002397", + "name": "梦洁股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "300079", + "name": "数码视讯", + "tag": "AI", + "reason": "" + }, + { + "code": "300078", + "name": "思创医惠", + "tag": "AI", + "reason": "" + }, + { + "code": "002398", + "name": "垒知集团", + "tag": "房地产", + "reason": "" + }, + { + "code": "002404", + "name": "嘉欣丝绸", + "tag": "大消费", + "reason": "" + }, + { + "code": "002408", + "name": "齐翔腾达", + "tag": "化工", + "reason": "" + }, + { + "code": "002405", + "name": "四维图新", + "tag": "AI", + "reason": "" + }, + { + "code": "002406", + "name": "远东传动", + "tag": "汽车", + "reason": "" + }, + { + "code": "300083", + "name": "创世纪", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300084", + "name": "海默科技", + "tag": "公用", + "reason": "" + }, + { + "code": "002410", + "name": "广联达", + "tag": "AI", + "reason": "" + }, + { + "code": "300085", + "name": "银之杰", + "tag": "AI", + "reason": "" + }, + { + "code": "300088", + "name": "长信科技", + "tag": "VR", + "reason": "" + }, + { + "code": "002420", + "name": "毅昌科技", + "tag": "大消费", + "reason": "" + }, + { + "code": "002428", + "name": "云南锗业", + "tag": "资源", + "reason": "" + }, + { + "code": "002425", + "name": "凯撒文化", + "tag": "AI", + "reason": "" + }, + { + "code": "002429", + "name": "兆驰股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002431", + "name": "棕榈股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "002430", + "name": "杭氧股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002434", + "name": "万里扬", + "tag": "汽车", + "reason": "" + }, + { + "code": "002436", + "name": "兴森科技", + "tag": "消费电子", + "reason": "" + }, + { + "code": "002396", + "name": "星网锐捷", + "tag": "AI", + "reason": "" + }, + { + "code": "002438", + "name": "江苏神通", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002439", + "name": "启明星辰", + "tag": "AI", + "reason": 
"" + }, + { + "code": "300091", + "name": "金通灵", + "tag": "智能机器", + "reason": "" + }, + { + "code": "601000", + "name": "唐山港", + "tag": "公用", + "reason": "" + }, + { + "code": "002440", + "name": "闰土股份", + "tag": "化工", + "reason": "" + }, + { + "code": "002443", + "name": "金洲管道", + "tag": "房地产", + "reason": "" + }, + { + "code": "300092", + "name": "科新机电", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002445", + "name": "中南文化", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002414", + "name": "高德红外", + "tag": "消费电子", + "reason": "" + }, + { + "code": "002448", + "name": "中原内配", + "tag": "汽车", + "reason": "" + }, + { + "code": "002451", + "name": "摩恩电气", + "tag": "电力", + "reason": "" + }, + { + "code": "002454", + "name": "松芝股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "002453", + "name": "华软科技", + "tag": "化工", + "reason": "" + }, + { + "code": "002452", + "name": "长高电新", + "tag": "电力", + "reason": "" + }, + { + "code": "300095", + "name": "华伍股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300096", + "name": "ST易联众", + "tag": "AI", + "reason": "" + }, + { + "code": "601717", + "name": "郑煤机", + "tag": "汽车", + "reason": "" + }, + { + "code": "002456", + "name": "欧菲光", + "tag": "VR", + "reason": "" + }, + { + "code": "002457", + "name": "青龙管业", + "tag": "房地产", + "reason": "" + }, + { + "code": "002458", + "name": "益生股份", + "tag": "农业", + "reason": "" + }, + { + "code": "300102", + "name": "乾照光电", + "tag": "VR", + "reason": "" + }, + { + "code": "300103", + "name": "达刚控股", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002461", + "name": "珠江啤酒", + "tag": "大消费", + "reason": "" + }, + { + "code": "002463", + "name": "沪电股份", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300107", + "name": "建新股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300110", + "name": "华仁药业", + "tag": "医药", + "reason": "" + }, + { + "code": "300109", + "name": "新开源", + "tag": "化工", + "reason": "" + }, + { + "code": "300111", + "name": "向日葵", + "tag": 
"医药", + "reason": "" + }, + { + "code": "300118", + "name": "东方日升", + "tag": "赛道", + "reason": "" + }, + { + "code": "002468", + "name": "申通快递", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "002469", + "name": "三维化学", + "tag": "化工", + "reason": "" + }, + { + "code": "002467", + "name": "二六三", + "tag": "AI", + "reason": "" + }, + { + "code": "002474", + "name": "榕基软件", + "tag": "AI", + "reason": "" + }, + { + "code": "002476", + "name": "宝莫股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300119", + "name": "瑞普生物", + "tag": "农业", + "reason": "" + }, + { + "code": "300120", + "name": "经纬辉开", + "tag": "VR", + "reason": "" + }, + { + "code": "300121", + "name": "阳谷华泰", + "tag": "化工", + "reason": "" + }, + { + "code": "002479", + "name": "富春环保", + "tag": "公用", + "reason": "" + }, + { + "code": "002480", + "name": "新筑股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002478", + "name": "常宝股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "601018", + "name": "宁波港", + "tag": "公用", + "reason": "" + }, + { + "code": "002482", + "name": "*ST广田", + "tag": "房地产", + "reason": "" + }, + { + "code": "002483", + "name": "润邦股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002484", + "name": "江海股份", + "tag": "消费电子", + "reason": "" + }, + { + "code": "601177", + "name": "杭齿前进", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300127", + "name": "银河磁体", + "tag": "资源", + "reason": "" + }, + { + "code": "002486", + "name": "嘉麟杰", + "tag": "大消费", + "reason": "" + }, + { + "code": "002485", + "name": "ST雪发", + "tag": "大消费", + "reason": "" + }, + { + "code": "300130", + "name": "新国都", + "tag": "AI", + "reason": "" + }, + { + "code": "300129", + "name": "泰胜风能", + "tag": "赛道", + "reason": "" + }, + { + "code": "002489", + "name": "浙江永强", + "tag": "大消费", + "reason": "" + }, + { + "code": "002488", + "name": "金固股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "002490", + "name": "山东墨龙", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002491", + "name": 
"通鼎互联", + "tag": "AI", + "reason": "" + }, + { + "code": "300132", + "name": "青松股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "300135", + "name": "宝利国际", + "tag": "化工", + "reason": "" + }, + { + "code": "002492", + "name": "恒基达鑫", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "002493", + "name": "荣盛石化", + "tag": "化工", + "reason": "" + }, + { + "code": "002494", + "name": "华斯股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "300137", + "name": "先河环保", + "tag": "公用", + "reason": "" + }, + { + "code": "300138", + "name": "晨光生物", + "tag": "农业", + "reason": "" + }, + { + "code": "002496", + "name": "辉丰股份", + "tag": "农业", + "reason": "" + }, + { + "code": "002498", + "name": "汉缆股份", + "tag": "电力", + "reason": "" + }, + { + "code": "300141", + "name": "和顺电气", + "tag": "电力", + "reason": "" + }, + { + "code": "002505", + "name": "鹏都农牧", + "tag": "大消费", + "reason": "" + }, + { + "code": "002506", + "name": "协鑫集成", + "tag": "赛道", + "reason": "" + }, + { + "code": "002504", + "name": "*ST弘高", + "tag": "房地产", + "reason": "" + }, + { + "code": "002508", + "name": "老板电器", + "tag": "大消费", + "reason": "" + }, + { + "code": "002511", + "name": "中顺洁柔", + "tag": "大消费", + "reason": "" + }, + { + "code": "002510", + "name": "天汽模", + "tag": "汽车", + "reason": "" + }, + { + "code": "002513", + "name": "蓝丰生化", + "tag": "农业", + "reason": "" + }, + { + "code": "002514", + "name": "宝馨科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002519", + "name": "银河电子", + "tag": "大消费", + "reason": "" + }, + { + "code": "002516", + "name": "旷达科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "300143", + "name": "盈康生命", + "tag": "医药", + "reason": "" + }, + { + "code": "300144", + "name": "宋城演艺", + "tag": "大消费", + "reason": "" + }, + { + "code": "300145", + "name": "中金环境", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002522", + "name": "浙江众成", + "tag": "化工", + "reason": "" + }, + { + "code": "002521", + "name": "齐峰新材", + "tag": "大消费", + "reason": "" + }, + { + "code": 
"300148", + "name": "天舟文化", + "tag": "AI", + "reason": "" + }, + { + "code": "002524", + "name": "光正眼科", + "tag": "医药", + "reason": "" + }, + { + "code": "300151", + "name": "昌红科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300150", + "name": "世纪瑞尔", + "tag": "AI", + "reason": "" + }, + { + "code": "002528", + "name": "英飞拓", + "tag": "AI", + "reason": "" + }, + { + "code": "601890", + "name": "亚星锚链", + "tag": "公用", + "reason": "" + }, + { + "code": "300152", + "name": "新动力", + "tag": "公用", + "reason": "" + }, + { + "code": "300153", + "name": "科泰电源", + "tag": "赛道", + "reason": "" + }, + { + "code": "601126", + "name": "四方股份", + "tag": "电力", + "reason": "" + }, + { + "code": "002530", + "name": "金财互联", + "tag": "AI", + "reason": "" + }, + { + "code": "002531", + "name": "天顺风能", + "tag": "赛道", + "reason": "" + }, + { + "code": "002533", + "name": "金杯电工", + "tag": "电力", + "reason": "" + }, + { + "code": "601118", + "name": "海南橡胶", + "tag": "化工", + "reason": "" + }, + { + "code": "300159", + "name": "新研股份", + "tag": "军工", + "reason": "" + }, + { + "code": "300157", + "name": "新锦动力", + "tag": "公用", + "reason": "" + }, + { + "code": "300155", + "name": "安居宝", + "tag": "AI", + "reason": "" + }, + { + "code": "002534", + "name": "西子洁能", + "tag": "赛道", + "reason": "" + }, + { + "code": "002537", + "name": "海联金汇", + "tag": "汽车", + "reason": "" + }, + { + "code": "002536", + "name": "飞龙股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "300162", + "name": "雷曼光电", + "tag": "VR", + "reason": "" + }, + { + "code": "300160", + "name": "秀强股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "300163", + "name": "先锋新材", + "tag": "化工", + "reason": "" + }, + { + "code": "300164", + "name": "通源石油", + "tag": "公用", + "reason": "" + }, + { + "code": "601700", + "name": "风范股份", + "tag": "电力", + "reason": "" + }, + { + "code": "002541", + "name": "鸿路钢构", + "tag": "房地产", + "reason": "" + }, + { + "code": "300167", + "name": "ST迪威迅", + "tag": "AI", + "reason": "" + }, + { + 
"code": "300168", + "name": "万达信息", + "tag": "AI", + "reason": "" + }, + { + "code": "300169", + "name": "天晟新材", + "tag": "化工", + "reason": "" + }, + { + "code": "300165", + "name": "天瑞仪器", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002543", + "name": "万和电气", + "tag": "大消费", + "reason": "" + }, + { + "code": "002542", + "name": "中化岩土", + "tag": "房地产", + "reason": "" + }, + { + "code": "601799", + "name": "星宇股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "601616", + "name": "广电电气", + "tag": "电力", + "reason": "" + }, + { + "code": "300174", + "name": "元力股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300170", + "name": "汉得信息", + "tag": "AI", + "reason": "" + }, + { + "code": "002546", + "name": "新联电子", + "tag": "电力", + "reason": "" + }, + { + "code": "002545", + "name": "东方铁塔", + "tag": "房地产", + "reason": "" + }, + { + "code": "300177", + "name": "中海达", + "tag": "AI", + "reason": "" + }, + { + "code": "300179", + "name": "四方达", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300176", + "name": "派生科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "002548", + "name": "金新农", + "tag": "农业", + "reason": "" + }, + { + "code": "002550", + "name": "千红制药", + "tag": "医药", + "reason": "" + }, + { + "code": "002549", + "name": "凯美特气", + "tag": "化工", + "reason": "" + }, + { + "code": "601216", + "name": "君正集团", + "tag": "化工", + "reason": "" + }, + { + "code": "300184", + "name": "力源信息", + "tag": "大消费", + "reason": "" + }, + { + "code": "300183", + "name": "东软载波", + "tag": "AI", + "reason": "" + }, + { + "code": "300180", + "name": "华峰超纤", + "tag": "化工", + "reason": "" + }, + { + "code": "002553", + "name": "南方精工", + "tag": "汽车", + "reason": "" + }, + { + "code": "002552", + "name": "宝鼎科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002554", + "name": "惠博普", + "tag": "公用", + "reason": "" + }, + { + "code": "601992", + "name": "金隅集团", + "tag": "房地产", + "reason": "" + }, + { + "code": "002555", + "name": "三七互娱", + "tag": "AI", + "reason": "" + 
}, + { + "code": "002560", + "name": "通达股份", + "tag": "电力", + "reason": "" + }, + { + "code": "300185", + "name": "通裕重工", + "tag": "赛道", + "reason": "" + }, + { + "code": "300187", + "name": "永清环保", + "tag": "公用", + "reason": "" + }, + { + "code": "002564", + "name": "*ST天沃", + "tag": "专业服务", + "reason": "" + }, + { + "code": "002562", + "name": "兄弟科技", + "tag": "化工", + "reason": "" + }, + { + "code": "002563", + "name": "森马服饰", + "tag": "大消费", + "reason": "" + }, + { + "code": "300189", + "name": "神农科技", + "tag": "农业", + "reason": "" + }, + { + "code": "300190", + "name": "维尔利", + "tag": "公用", + "reason": "" + }, + { + "code": "300191", + "name": "潜能恒信", + "tag": "公用", + "reason": "" + }, + { + "code": "601199", + "name": "江南水务", + "tag": "公用", + "reason": "" + }, + { + "code": "002565", + "name": "顺灏股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300192", + "name": "科德教育", + "tag": "教育", + "reason": "" + }, + { + "code": "300194", + "name": "福安药业", + "tag": "医药", + "reason": "" + }, + { + "code": "002568", + "name": "百润股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002567", + "name": "唐人神", + "tag": "农业", + "reason": "" + }, + { + "code": "300196", + "name": "长海股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300200", + "name": "高盟新材", + "tag": "化工", + "reason": "" + }, + { + "code": "002572", + "name": "索菲亚", + "tag": "房地产", + "reason": "" + }, + { + "code": "002569", + "name": "ST步森", + "tag": "大消费", + "reason": "" + }, + { + "code": "002571", + "name": "德力股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "300203", + "name": "聚光科技", + "tag": "公用", + "reason": "" + }, + { + "code": "300205", + "name": "ST天喻", + "tag": "AI", + "reason": "" + }, + { + "code": "002574", + "name": "明牌珠宝", + "tag": "大消费", + "reason": "" + }, + { + "code": "002573", + "name": "清新环境", + "tag": "公用", + "reason": "" + }, + { + "code": "300210", + "name": "森远股份", + "tag": "公用", + "reason": "" + }, + { + "code": "300208", + "name": "青岛中程", + "tag": "电力", + 
"reason": "" + }, + { + "code": "300209", + "name": "ST有棵树", + "tag": "AI", + "reason": "" + }, + { + "code": "002576", + "name": "通达动力", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300213", + "name": "佳讯飞鸿", + "tag": "AI", + "reason": "" + }, + { + "code": "300211", + "name": "亿通科技", + "tag": "AI", + "reason": "" + }, + { + "code": "601218", + "name": "吉鑫科技", + "tag": "赛道", + "reason": "" + }, + { + "code": "002579", + "name": "中京电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300215", + "name": "电科院", + "tag": "专业服务", + "reason": "" + }, + { + "code": "300214", + "name": "日科化学", + "tag": "化工", + "reason": "" + }, + { + "code": "601233", + "name": "桐昆股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300219", + "name": "鸿利智汇", + "tag": "VR", + "reason": "" + }, + { + "code": "300218", + "name": "安利股份", + "tag": "化工", + "reason": "" + }, + { + "code": "601208", + "name": "东材科技", + "tag": "化工", + "reason": "" + }, + { + "code": "300220", + "name": "ST金运", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300221", + "name": "银禧科技", + "tag": "化工", + "reason": "" + }, + { + "code": "601599", + "name": "浙文影业", + "tag": "传媒", + "reason": "" + }, + { + "code": "300224", + "name": "正海磁材", + "tag": "资源", + "reason": "" + }, + { + "code": "300225", + "name": "金力泰", + "tag": "化工", + "reason": "" + }, + { + "code": "002586", + "name": "*ST围海", + "tag": "房地产", + "reason": "" + }, + { + "code": "002585", + "name": "双星新材", + "tag": "化工", + "reason": "" + }, + { + "code": "300228", + "name": "富瑞特装", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300227", + "name": "光韵达", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300226", + "name": "上海钢联", + "tag": "AI", + "reason": "" + }, + { + "code": "601567", + "name": "三星医疗", + "tag": "电力", + "reason": "" + }, + { + "code": "300231", + "name": "银信科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300229", + "name": "拓尔思", + "tag": "AI", + "reason": "" + }, + { + "code": "300230", + "name": "永利股份", + 
"tag": "化工", + "reason": "" + }, + { + "code": "300233", + "name": "金城医药", + "tag": "医药", + "reason": "" + }, + { + "code": "300234", + "name": "开尔新材", + "tag": "房地产", + "reason": "" + }, + { + "code": "002595", + "name": "豪迈科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002593", + "name": "日上集团", + "tag": "汽车", + "reason": "" + }, + { + "code": "300237", + "name": "美晨生态", + "tag": "房地产", + "reason": "" + }, + { + "code": "300236", + "name": "上海新阳", + "tag": "半导体", + "reason": "" + }, + { + "code": "300235", + "name": "方直科技", + "tag": "AI", + "reason": "" + }, + { + "code": "601058", + "name": "赛轮轮胎", + "tag": "化工", + "reason": "" + }, + { + "code": "300240", + "name": "飞力达", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "002596", + "name": "海南瑞泽", + "tag": "房地产", + "reason": "" + }, + { + "code": "300241", + "name": "瑞丰光电", + "tag": "VR", + "reason": "" + }, + { + "code": "300243", + "name": "瑞丰高材", + "tag": "化工", + "reason": "" + }, + { + "code": "300245", + "name": "天玑科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300244", + "name": "迪安诊断", + "tag": "医药", + "reason": "" + }, + { + "code": "300247", + "name": "融捷健康", + "tag": "大消费", + "reason": "" + }, + { + "code": "300248", + "name": "新开普", + "tag": "AI", + "reason": "" + }, + { + "code": "002606", + "name": "大连电瓷", + "tag": "电力", + "reason": "" + }, + { + "code": "002607", + "name": "中公教育", + "tag": "教育", + "reason": "" + }, + { + "code": "601636", + "name": "旗滨集团", + "tag": "化工", + "reason": "" + }, + { + "code": "002609", + "name": "捷顺科技", + "tag": "AI", + "reason": "" + }, + { + "code": "601789", + "name": "宁波建工", + "tag": "房地产", + "reason": "" + }, + { + "code": "300253", + "name": "卫宁健康", + "tag": "AI", + "reason": "" + }, + { + "code": "300257", + "name": "开山股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300258", + "name": "精锻科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "002613", + "name": "北玻股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300259", + 
"name": "新天科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300261", + "name": "雅本化学", + "tag": "农业", + "reason": "" + }, + { + "code": "300260", + "name": "新莱应材", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300265", + "name": "通光线缆", + "tag": "电力", + "reason": "" + }, + { + "code": "300263", + "name": "隆华科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300268", + "name": "*ST佳沃", + "tag": "农业", + "reason": "" + }, + { + "code": "300266", + "name": "兴源环境", + "tag": "公用", + "reason": "" + }, + { + "code": "601633", + "name": "长城汽车", + "tag": "汽车", + "reason": "" + }, + { + "code": "601996", + "name": "丰林集团", + "tag": "房地产", + "reason": "" + }, + { + "code": "002621", + "name": "美吉姆", + "tag": "教育", + "reason": "" + }, + { + "code": "002620", + "name": "瑞和股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "300270", + "name": "中威电子", + "tag": "AI", + "reason": "" + }, + { + "code": "002623", + "name": "亚玛顿", + "tag": "赛道", + "reason": "" + }, + { + "code": "002622", + "name": "皓宸医疗", + "tag": "电力", + "reason": "" + }, + { + "code": "300271", + "name": "华宇软件", + "tag": "AI", + "reason": "" + }, + { + "code": "601100", + "name": "恒立液压", + "tag": "房地产", + "reason": "" + }, + { + "code": "300272", + "name": "开能健康", + "tag": "大消费", + "reason": "" + }, + { + "code": "300274", + "name": "阳光电源", + "tag": "赛道", + "reason": "" + }, + { + "code": "300275", + "name": "梅安森", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002628", + "name": "成都路桥", + "tag": "房地产", + "reason": "" + }, + { + "code": "002627", + "name": "三峡旅游", + "tag": "公用", + "reason": "" + }, + { + "code": "002629", + "name": "仁智股份", + "tag": "公用", + "reason": "" + }, + { + "code": "601028", + "name": "玉龙股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002631", + "name": "德尔未来", + "tag": "房地产", + "reason": "" + }, + { + "code": "002630", + "name": "华西能源", + "tag": "赛道", + "reason": "" + }, + { + "code": "002592", + "name": "ST八菱", + "tag": "汽车", + "reason": "" + }, + { + 
"code": "002633", + "name": "申科股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300277", + "name": "海联讯", + "tag": "AI", + "reason": "" + }, + { + "code": "002638", + "name": "勤上股份", + "tag": "VR", + "reason": "" + }, + { + "code": "002636", + "name": "金安国纪", + "tag": "消费电子", + "reason": "" + }, + { + "code": "002637", + "name": "赞宇科技", + "tag": "化工", + "reason": "" + }, + { + "code": "002639", + "name": "雪人股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002640", + "name": "跨境通", + "tag": "大消费", + "reason": "" + }, + { + "code": "430090", + "name": "同辉信息", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002642", + "name": "荣联科技", + "tag": "AI", + "reason": "" + }, + { + "code": "002645", + "name": "华宏科技", + "tag": "公用", + "reason": "" + }, + { + "code": "002647", + "name": "仁东控股", + "tag": "金融", + "reason": "" + }, + { + "code": "300281", + "name": "金明精机", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002651", + "name": "利君股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002649", + "name": "博彦科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300284", + "name": "苏交科", + "tag": "房地产", + "reason": "" + }, + { + "code": "300286", + "name": "安科瑞", + "tag": "电力", + "reason": "" + }, + { + "code": "300285", + "name": "国瓷材料", + "tag": "化工", + "reason": "" + }, + { + "code": "601360", + "name": "三六零", + "tag": "AI", + "reason": "" + }, + { + "code": "002652", + "name": "扬子新材", + "tag": "房地产", + "reason": "" + }, + { + "code": "300287", + "name": "飞利信", + "tag": "AI", + "reason": "" + }, + { + "code": "300288", + "name": "朗玛信息", + "tag": "AI", + "reason": "" + }, + { + "code": "002656", + "name": "ST摩登", + "tag": "大消费", + "reason": "" + }, + { + "code": "300292", + "name": "吴通控股", + "tag": "AI", + "reason": "" + }, + { + "code": "002659", + "name": "凯文教育", + "tag": "教育", + "reason": "" + }, + { + "code": "002658", + "name": "雪迪龙", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300296", + "name": "利亚德", + 
"tag": "VR", + "reason": "" + }, + { + "code": "002663", + "name": "普邦股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "002664", + "name": "信质集团", + "tag": "汽车", + "reason": "" + }, + { + "code": "300303", + "name": "聚飞光电", + "tag": "VR", + "reason": "" + }, + { + "code": "300299", + "name": "富春股份", + "tag": "AI", + "reason": "" + }, + { + "code": "300300", + "name": "海峡创新", + "tag": "AI", + "reason": "" + }, + { + "code": "300301", + "name": "*ST长方", + "tag": "VR", + "reason": "" + }, + { + "code": "300302", + "name": "同有科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300304", + "name": "云意电气", + "tag": "汽车", + "reason": "" + }, + { + "code": "002666", + "name": "德联集团", + "tag": "化工", + "reason": "" + }, + { + "code": "300306", + "name": "远方信息", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300305", + "name": "裕兴股份", + "tag": "化工", + "reason": "" + }, + { + "code": "601012", + "name": "隆基绿能", + "tag": "赛道", + "reason": "" + }, + { + "code": "002668", + "name": "奥马电器", + "tag": "大消费", + "reason": "" + }, + { + "code": "300311", + "name": "任子行", + "tag": "AI", + "reason": "" + }, + { + "code": "300310", + "name": "宜通世纪", + "tag": "AI", + "reason": "" + }, + { + "code": "300313", + "name": "*ST天山", + "tag": "农业", + "reason": "" + }, + { + "code": "603001", + "name": "ST奥康", + "tag": "大消费", + "reason": "" + }, + { + "code": "002672", + "name": "东江环保", + "tag": "公用", + "reason": "" + }, + { + "code": "002671", + "name": "龙泉股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "002674", + "name": "兴业科技", + "tag": "大消费", + "reason": "" + }, + { + "code": "300315", + "name": "掌趣科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300316", + "name": "晶盛机电", + "tag": "赛道", + "reason": "" + }, + { + "code": "603002", + "name": "宏昌电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "603366", + "name": "日出东方", + "tag": "大消费", + "reason": "" + }, + { + "code": "300319", + "name": "麦捷科技", + "tag": "VR", + "reason": "" + }, + { + "code": "300321", + 
"name": "同大股份", + "tag": "化工", + "reason": "" + }, + { + "code": "002677", + "name": "浙江美大", + "tag": "大消费", + "reason": "" + }, + { + "code": "002676", + "name": "顺威股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002678", + "name": "珠江钢琴", + "tag": "大消费", + "reason": "" + }, + { + "code": "300320", + "name": "海达股份", + "tag": "化工", + "reason": "" + }, + { + "code": "002679", + "name": "福建金森", + "tag": "农业", + "reason": "" + }, + { + "code": "300324", + "name": "旋极信息", + "tag": "AI", + "reason": "" + }, + { + "code": "601339", + "name": "百隆东方", + "tag": "大消费", + "reason": "" + }, + { + "code": "002685", + "name": "华东重机", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002682", + "name": "龙洲股份", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "002683", + "name": "广东宏大", + "tag": "公用", + "reason": "" + }, + { + "code": "300329", + "name": "海伦钢琴", + "tag": "大消费", + "reason": "" + }, + { + "code": "300328", + "name": "宜安科技", + "tag": "资源", + "reason": "" + }, + { + "code": "300331", + "name": "苏大维格", + "tag": "VR", + "reason": "" + }, + { + "code": "002686", + "name": "亿利达", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300335", + "name": "迪森股份", + "tag": "公用", + "reason": "" + }, + { + "code": "002687", + "name": "乔治白", + "tag": "大消费", + "reason": "" + }, + { + "code": "002688", + "name": "金河生物", + "tag": "农业", + "reason": "" + }, + { + "code": "603008", + "name": "喜临门", + "tag": "房地产", + "reason": "" + }, + { + "code": "300338", + "name": "开元教育", + "tag": "教育", + "reason": "" + }, + { + "code": "300341", + "name": "麦克奥迪", + "tag": "电力", + "reason": "" + }, + { + "code": "002691", + "name": "冀凯股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300345", + "name": "华民股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300343", + "name": "联创股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300346", + "name": "南大光电", + "tag": "半导体", + "reason": "" + }, + { + "code": "002692", + "name": "远程股份", + "tag": "电力", + "reason": "" + }, + { + 
"code": "603766", + "name": "隆鑫通用", + "tag": "公用", + "reason": "" + }, + { + "code": "002694", + "name": "顾地科技", + "tag": "房地产", + "reason": "" + }, + { + "code": "603003", + "name": "龙宇股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "300347", + "name": "泰格医药", + "tag": "医药", + "reason": "" + }, + { + "code": "300349", + "name": "金卡智能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300348", + "name": "长亮科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300350", + "name": "华鹏飞", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "002696", + "name": "百洋股份", + "tag": "农业", + "reason": "" + }, + { + "code": "603167", + "name": "渤海轮渡", + "tag": "公用", + "reason": "" + }, + { + "code": "300351", + "name": "永贵电器", + "tag": "公用", + "reason": "" + }, + { + "code": "300354", + "name": "东华测试", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300353", + "name": "东土科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300355", + "name": "蒙草生态", + "tag": "公用", + "reason": "" + }, + { + "code": "002701", + "name": "奥瑞金", + "tag": "化工", + "reason": "" + }, + { + "code": "430017", + "name": "星昊医药", + "tag": "医药", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "430198", + "name": "微创光电", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603699", + "name": "纽威股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300359", + "name": "全通教育", + "tag": "教育", + "reason": "" + }, + { + "code": "002705", + "name": "新宝股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002706", + "name": "良信股份", + "tag": "电力", + "reason": "" + }, + { + "code": "300360", + "name": "炬华科技", + "tag": "电力", + "reason": "" + }, + { + "code": "603308", + "name": "应流股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002707", + "name": "众信旅游", + "tag": "大消费", + "reason": "" + }, + { + "code": "300368", + "name": "汇金股份", + "tag": "AI", + "reason": "" + }, + { + "code": "300370", + "name": "安控科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": 
"300371", + "name": "汇中股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300365", + "name": "恒华科技", + "tag": "AI", + "reason": "" + }, + { + "code": "603555", + "name": "ST贵人", + "tag": "大消费", + "reason": "" + }, + { + "code": "430476", + "name": "海能技术", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "430425", + "name": "乐创技术", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "430564", + "name": "天润科技", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "430556", + "name": "雅达股份", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002721", + "name": "*ST金一", + "tag": "大消费", + "reason": "" + }, + { + "code": "300378", + "name": "鼎捷软件", + "tag": "AI", + "reason": "" + }, + { + "code": "300377", + "name": "赢时胜", + "tag": "AI", + "reason": "" + }, + { + "code": "300375", + "name": "鹏翎股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "300381", + "name": "溢多利", + "tag": "医药", + "reason": "" + }, + { + "code": "002718", + "name": "友邦吊顶", + "tag": "房地产", + "reason": "" + }, + { + "code": "300379", + "name": "东方通", + "tag": "AI", + "reason": "" + }, + { + "code": "002714", + "name": "牧原股份", + "tag": "农业", + "reason": "" + }, + { + "code": "002716", + "name": "金贵银业", + "tag": "资源", + "reason": "" + }, + { + "code": "300380", + "name": "安硕信息", + "tag": "AI", + "reason": "" + }, + { + "code": "002725", + "name": "跃岭股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "300363", + "name": "博腾股份", + "tag": "医药", + "reason": "" + }, + { + "code": "002723", + "name": "小崧股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "300369", + "name": "绿盟科技", + "tag": "AI", + "reason": "" + }, + { + "code": "002713", + "name": "东易日盛", + "tag": "房地产", + "reason": "" + }, + { + "code": "002715", + "name": "登云股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "002717", + "name": "岭南股份", + "tag": "公用", + "reason": "" + }, + { + "code": "300385", + "name": "雪浪环境", + "tag": "公用", + 
"reason": "" + }, + { + "code": "300386", + "name": "飞天诚信", + "tag": "AI", + "reason": "" + }, + { + "code": "603006", + "name": "联明股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "603328", + "name": "依顿电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "430489", + "name": "佳先股份", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603009", + "name": "北特科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "603111", + "name": "康尼机电", + "tag": "公用", + "reason": "" + }, + { + "code": "300389", + "name": "艾比森", + "tag": "VR", + "reason": "" + }, + { + "code": "300384", + "name": "三联虹普", + "tag": "专业服务", + "reason": "" + }, + { + "code": "603100", + "name": "川仪股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603609", + "name": "禾丰股份", + "tag": "农业", + "reason": "" + }, + { + "code": "830879", + "name": "基康仪器", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603188", + "name": "亚邦股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300397", + "name": "天和防务", + "tag": "AI", + "reason": "" + }, + { + "code": "300395", + "name": "菲利华", + "tag": "化工", + "reason": "" + }, + { + "code": "002729", + "name": "好利科技", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300393", + "name": "中来股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "603306", + "name": "华懋科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "603169", + "name": "兰石重装", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002730", + "name": "电光科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300401", + "name": "花园生物", + "tag": "医药", + "reason": "" + }, + { + "code": "300398", + "name": "飞凯材料", + "tag": "半导体", + "reason": "" + }, + { + "code": "300399", + "name": "天利科技", + "tag": "AI", + "reason": "" + }, + { + "code": "603456", + "name": "九洲药业", + "tag": "医药", + "reason": "" + }, + { + "code": "300400", + "name": "劲拓股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300402", + "name": "宝色股份", + "tag": "智能机器", + "reason": 
"" + }, + { + "code": "603018", + "name": "华设集团", + "tag": "房地产", + "reason": "" + }, + { + "code": "603606", + "name": "东方电缆", + "tag": "电力", + "reason": "" + }, + { + "code": "831152", + "name": "昆工科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300403", + "name": "汉宇集团", + "tag": "大消费", + "reason": "" + }, + { + "code": "603688", + "name": "石英股份", + "tag": "化工", + "reason": "" + }, + { + "code": "430478", + "name": "峆一药业", + "tag": "医药", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603988", + "name": "中电电机", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002724", + "name": "海洋王", + "tag": "消费电子", + "reason": "" + }, + { + "code": "430718", + "name": "合肥高科", + "tag": "消费电子", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603166", + "name": "福达股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "831087", + "name": "秋乐种业", + "tag": "农业", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300408", + "name": "三环集团", + "tag": "消费电子", + "reason": "" + }, + { + "code": "002735", + "name": "王子新材", + "tag": "化工", + "reason": "" + }, + { + "code": "300407", + "name": "凯发电气", + "tag": "电力", + "reason": "" + }, + { + "code": "603588", + "name": "高能环境", + "tag": "公用", + "reason": "" + }, + { + "code": "430418", + "name": "苏轴股份", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603017", + "name": "中衡设计", + "tag": "房地产", + "reason": "" + }, + { + "code": "603889", + "name": "新澳股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "300411", + "name": "金盾股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "830964", + "name": "润农节水", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603788", + "name": "宁波高发", + "tag": "汽车", + "reason": "" + }, + { + "code": "601021", + "name": "春秋航空", + "tag": "公用", + "reason": "" + }, + { + "code": "603601", + "name": "再升科技", + "tag": "化工", + "reason": "" + }, + { + "code": "601069", + "name": "西部黄金", + "tag": "资源", + 
"reason": "" + }, + { + "code": "002740", + "name": "*ST爱迪", + "tag": "大消费", + "reason": "" + }, + { + "code": "300416", + "name": "苏试试验", + "tag": "专业服务", + "reason": "" + }, + { + "code": "300419", + "name": "浩丰科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300417", + "name": "南华仪器", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603678", + "name": "火炬电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "603686", + "name": "福龙马", + "tag": "公用", + "reason": "" + }, + { + "code": "603899", + "name": "晨光股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "603558", + "name": "健盛集团", + "tag": "大消费", + "reason": "" + }, + { + "code": "603611", + "name": "诺力股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "830974", + "name": "凯大催化", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002746", + "name": "仙坛股份", + "tag": "农业", + "reason": "" + }, + { + "code": "603939", + "name": "益丰药房", + "tag": "医药", + "reason": "" + }, + { + "code": "603898", + "name": "好莱客", + "tag": "房地产", + "reason": "" + }, + { + "code": "603222", + "name": "济民医疗", + "tag": "医药", + "reason": "" + }, + { + "code": "300421", + "name": "力星股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002745", + "name": "木林森", + "tag": "VR", + "reason": "" + }, + { + "code": "002743", + "name": "富煌钢构", + "tag": "房地产", + "reason": "" + }, + { + "code": "300422", + "name": "博世科", + "tag": "公用", + "reason": "" + }, + { + "code": "002742", + "name": "ST三圣", + "tag": "房地产", + "reason": "" + }, + { + "code": "300427", + "name": "*ST红相", + "tag": "电力", + "reason": "" + }, + { + "code": "300423", + "name": "昇辉科技", + "tag": "电力", + "reason": "" + }, + { + "code": "603118", + "name": "共进股份", + "tag": "AI", + "reason": "" + }, + { + "code": "603828", + "name": "柯利达", + "tag": "房地产", + "reason": "" + }, + { + "code": "603969", + "name": "银龙股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603997", + "name": "继峰股份", + "tag": "汽车", + "reason": "" + }, + { + "code": 
"831370", + "name": "新安洁", + "tag": "公用", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "831961", + "name": "创远信科", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603519", + "name": "立霸股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "603268", + "name": "松发股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "601689", + "name": "拓普集团", + "tag": "汽车", + "reason": "" + }, + { + "code": "002748", + "name": "世龙实业", + "tag": "化工", + "reason": "" + }, + { + "code": "603012", + "name": "创力集团", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603158", + "name": "腾龙股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "002749", + "name": "国光股份", + "tag": "农业", + "reason": "" + }, + { + "code": "300429", + "name": "强力新材", + "tag": "半导体", + "reason": "" + }, + { + "code": "832149", + "name": "利尔达", + "tag": "大消费", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603338", + "name": "浙江鼎力", + "tag": "房地产", + "reason": "" + }, + { + "code": "603869", + "name": "新智认知", + "tag": "AI", + "reason": "" + }, + { + "code": "831832", + "name": "科达自控", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "831175", + "name": "派诺科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603818", + "name": "曲美家居", + "tag": "房地产", + "reason": "" + }, + { + "code": "603808", + "name": "歌力思", + "tag": "大消费", + "reason": "" + }, + { + "code": "300449", + "name": "汉邦高科", + "tag": "AI", + "reason": "" + }, + { + "code": "300424", + "name": "航新科技", + "tag": "军工", + "reason": "" + }, + { + "code": "300443", + "name": "金雷股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "002752", + "name": "昇兴股份", + "tag": "化工", + "reason": "" + }, + { + "code": "603703", + "name": "盛洋科技", + "tag": "VR", + "reason": "" + }, + { + "code": "300441", + "name": "鲍斯股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300440", + "name": "运达科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300444", + "name": 
"双杰电气", + "tag": "电力", + "reason": "" + }, + { + "code": "300445", + "name": "康斯特", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300448", + "name": "浩云科技", + "tag": "AI", + "reason": "" + }, + { + "code": "603789", + "name": "星光农机", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603599", + "name": "广信股份", + "tag": "农业", + "reason": "" + }, + { + "code": "603355", + "name": "莱克电气", + "tag": "大消费", + "reason": "" + }, + { + "code": "300414", + "name": "中光防雷", + "tag": "AI", + "reason": "" + }, + { + "code": "300451", + "name": "创业慧康", + "tag": "AI", + "reason": "" + }, + { + "code": "603718", + "name": "海利生物", + "tag": "农业", + "reason": "" + }, + { + "code": "603989", + "name": "艾华集团", + "tag": "消费电子", + "reason": "" + }, + { + "code": "603227", + "name": "雪峰科技", + "tag": "化工", + "reason": "" + }, + { + "code": "300460", + "name": "惠伦晶体", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300452", + "name": "山河药辅", + "tag": "医药", + "reason": "" + }, + { + "code": "603311", + "name": "金海高科", + "tag": "大消费", + "reason": "" + }, + { + "code": "603968", + "name": "醋化股份", + "tag": "化工", + "reason": "" + }, + { + "code": "603022", + "name": "新通联", + "tag": "化工", + "reason": "" + }, + { + "code": "603566", + "name": "普莱柯", + "tag": "农业", + "reason": "" + }, + { + "code": "002753", + "name": "永东股份", + "tag": "化工", + "reason": "" + }, + { + "code": "832469", + "name": "富恒新材", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "830832", + "name": "齐鲁华信", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "832419", + "name": "路斯股份", + "tag": "农业", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603885", + "name": "吉祥航空", + "tag": "公用", + "reason": "" + }, + { + "code": "603023", + "name": "威帝股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "603108", + "name": "润达医疗", + "tag": "医药", + "reason": "" + }, + { + "code": "002757", + "name": "南兴股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300467", + 
"name": "迅游科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300468", + "name": "四方精创", + "tag": "AI", + "reason": "" + }, + { + "code": "300462", + "name": "华铭智能", + "tag": "公用", + "reason": "" + }, + { + "code": "603669", + "name": "灵康药业", + "tag": "医药", + "reason": "" + }, + { + "code": "603918", + "name": "金桥信息", + "tag": "AI", + "reason": "" + }, + { + "code": "603568", + "name": "伟明环保", + "tag": "公用", + "reason": "" + }, + { + "code": "300465", + "name": "高伟达", + "tag": "AI", + "reason": "" + }, + { + "code": "603300", + "name": "华铁应急", + "tag": "智能机器", + "reason": "" + }, + { + "code": "831768", + "name": "拾比佰", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "830839", + "name": "万通液压", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002763", + "name": "汇洁股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002761", + "name": "浙江建投", + "tag": "房地产", + "reason": "" + }, + { + "code": "300478", + "name": "杭州高新", + "tag": "化工", + "reason": "" + }, + { + "code": "300464", + "name": "星徽股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603066", + "name": "音飞储存", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "603616", + "name": "韩建河山", + "tag": "房地产", + "reason": "" + }, + { + "code": "002766", + "name": "索菱股份", + "tag": "AI", + "reason": "" + }, + { + "code": "300471", + "name": "厚普股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002760", + "name": "凤形股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300476", + "name": "胜宏科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300469", + "name": "信息发展", + "tag": "AI", + "reason": "" + }, + { + "code": "830779", + "name": "武汉蓝电", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "601368", + "name": "绿城水务", + "tag": "公用", + "reason": "" + }, + { + "code": "300479", + "name": "神思电子", + "tag": "AI", + "reason": "" + }, + { + "code": "300470", + "name": "中密控股", + "tag": "智能机器", + "reason": "" + }, + { + 
"code": "002767", + "name": "先锋电子", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002765", + "name": "蓝黛科技", + "tag": "VR", + "reason": "" + }, + { + "code": "430510", + "name": "丰光精密", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "831195", + "name": "三祥科技", + "tag": "汽车", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002772", + "name": "众兴菌业", + "tag": "农业", + "reason": "" + }, + { + "code": "002776", + "name": "*ST柏龙", + "tag": "大消费", + "reason": "" + }, + { + "code": "603117", + "name": "ST万林", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "603116", + "name": "红蜻蜓", + "tag": "大消费", + "reason": "" + }, + { + "code": "002775", + "name": "文科股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "002769", + "name": "普路通", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "603085", + "name": "天成自控", + "tag": "汽车", + "reason": "" + }, + { + "code": "603223", + "name": "恒通股份", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "002768", + "name": "国恩股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300481", + "name": "濮阳惠成", + "tag": "半导体", + "reason": "" + }, + { + "code": "603838", + "name": "四通股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "300488", + "name": "恒锋工具", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300480", + "name": "光力科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "831641", + "name": "格利尔", + "tag": "大消费", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "832735", + "name": "德源药业", + "tag": "医药", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "831305", + "name": "海希通讯", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "831689", + "name": "克莱特", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "832802", + "name": "保丽洁", + "tag": "公用", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "832225", + "name": "利通科技", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "831278", + 
"name": "泰德股份", + "tag": "汽车", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "831396", + "name": "许昌智能", + "tag": "电力", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "430685", + "name": "新芝生物", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "832145", + "name": "恒合股份", + "tag": "公用", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "833171", + "name": "国航远洋", + "tag": "公用", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "831526", + "name": "凯华材料", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "832491", + "name": "奥迪威", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "832171", + "name": "志晟信息", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "831445", + "name": "龙竹科技", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "832175", + "name": "东方碳素", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300498", + "name": "温氏股份", + "tag": "农业", + "reason": "" + }, + { + "code": "831856", + "name": "浩淼科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "833873", + "name": "中设咨询", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "832978", + "name": "开特股份", + "tag": "汽车", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "831039", + "name": "国义招标", + "tag": "专业服务", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "834415", + "name": "恒拓开源", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603936", + "name": "博敏电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "002779", + "name": "中坚科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "833914", + "name": "远航精密", + "tag": "消费电子", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603800", + "name": "道森股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "833509", + "name": "同惠电子", + "tag": "电力", + "reason": "", + "hidden_tag": "北交所" + }, 
+ { + "code": "834682", + "name": "球冠电缆", + "tag": "电力", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300495", + "name": "*ST美尚", + "tag": "公用", + "reason": "" + }, + { + "code": "833751", + "name": "惠同新材", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002785", + "name": "万里石", + "tag": "房地产", + "reason": "" + }, + { + "code": "833454", + "name": "同心传动", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300494", + "name": "盛天网络", + "tag": "AI", + "reason": "" + }, + { + "code": "002787", + "name": "华源控股", + "tag": "化工", + "reason": "" + }, + { + "code": "300491", + "name": "通合科技", + "tag": "赛道", + "reason": "" + }, + { + "code": "834062", + "name": "科润智控", + "tag": "电力", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "835508", + "name": "殷图网联", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "833819", + "name": "颖泰生物", + "tag": "农业", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "832471", + "name": "美邦科技", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "832651", + "name": "天罡股份", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300499", + "name": "高澜股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300500", + "name": "启迪设计", + "tag": "房地产", + "reason": "" + }, + { + "code": "603377", + "name": "东方时尚", + "tag": "汽车", + "reason": "" + }, + { + "code": "002788", + "name": "鹭燕医药", + "tag": "医药", + "reason": "" + }, + { + "code": "002790", + "name": "瑞尔特", + "tag": "房地产", + "reason": "" + }, + { + "code": "834639", + "name": "晨光电缆", + "tag": "电力", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603520", + "name": "司太立", + "tag": "医药", + "reason": "" + }, + { + "code": "300503", + "name": "昊志机电", + "tag": "智能机器", + "reason": "" + }, + { + "code": "835174", + "name": "五新隧装", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603861", + "name": "白云电器", + "tag": "电力", + 
"reason": "" + }, + { + "code": "300506", + "name": "名家汇", + "tag": "房地产", + "reason": "" + }, + { + "code": "834765", + "name": "美之高", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002792", + "name": "通宇通讯", + "tag": "AI", + "reason": "" + }, + { + "code": "002791", + "name": "坚朗五金", + "tag": "房地产", + "reason": "" + }, + { + "code": "835184", + "name": "国源科技", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603028", + "name": "赛福天", + "tag": "智能机器", + "reason": "" + }, + { + "code": "832110", + "name": "雷特科技", + "tag": "消费电子", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603701", + "name": "德宏股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "603868", + "name": "飞科电器", + "tag": "大消费", + "reason": "" + }, + { + "code": "300508", + "name": "维宏股份", + "tag": "AI", + "reason": "" + }, + { + "code": "603726", + "name": "朗迪集团", + "tag": "大消费", + "reason": "" + }, + { + "code": "300509", + "name": "新美星", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603822", + "name": "嘉澳环保", + "tag": "化工", + "reason": "" + }, + { + "code": "002795", + "name": "永和智控", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300507", + "name": "苏奥传感", + "tag": "汽车", + "reason": "" + }, + { + "code": "603528", + "name": "多伦科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300511", + "name": "雪榕生物", + "tag": "农业", + "reason": "" + }, + { + "code": "837212", + "name": "智新电子", + "tag": "消费电子", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002796", + "name": "世嘉科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603339", + "name": "四方科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "834475", + "name": "三友科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "834261", + "name": "一诺威", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002798", + "name": "帝欧家居", + "tag": "房地产", + "reason": "" + }, + { + "code": "300512", + "name": "中亚股份", + "tag": 
"智能机器", + "reason": "" + }, + { + "code": "831834", + "name": "三维股份", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300513", + "name": "恒实科技", + "tag": "AI", + "reason": "" + }, + { + "code": "002800", + "name": "ST天顺", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "833580", + "name": "科创新材", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603737", + "name": "三棵树", + "tag": "房地产", + "reason": "" + }, + { + "code": "836419", + "name": "万德股份", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002799", + "name": "环球印务", + "tag": "化工", + "reason": "" + }, + { + "code": "300515", + "name": "三德科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "601127", + "name": "赛力斯", + "tag": "汽车", + "reason": "" + }, + { + "code": "833030", + "name": "立方控股", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002802", + "name": "洪汇新材", + "tag": "化工", + "reason": "" + }, + { + "code": "603016", + "name": "新宏泰", + "tag": "电力", + "reason": "" + }, + { + "code": "832885", + "name": "星辰科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "836263", + "name": "中航泰达", + "tag": "公用", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300521", + "name": "爱司凯", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300522", + "name": "世名科技", + "tag": "化工", + "reason": "" + }, + { + "code": "601966", + "name": "玲珑轮胎", + "tag": "化工", + "reason": "" + }, + { + "code": "603069", + "name": "海汽集团", + "tag": "公用", + "reason": "" + }, + { + "code": "300517", + "name": "海波重科", + "tag": "房地产", + "reason": "" + }, + { + "code": "832662", + "name": "方盛股份", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300525", + "name": "博思软件", + "tag": "AI", + "reason": "" + }, + { + "code": "603322", + "name": "超讯通信", + "tag": "AI", + "reason": "" + }, + { + "code": "603569", + "name": "长久物流", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "603159", + 
"name": "上海亚虹", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300535", + "name": "达威股份", + "tag": "化工", + "reason": "" + }, + { + "code": "002808", + "name": "ST恒久", + "tag": "VR", + "reason": "" + }, + { + "code": "300533", + "name": "冰川网络", + "tag": "AI", + "reason": "" + }, + { + "code": "603515", + "name": "欧普照明", + "tag": "大消费", + "reason": "" + }, + { + "code": "603031", + "name": "安孚科技", + "tag": "赛道", + "reason": "" + }, + { + "code": "300540", + "name": "蜀道装备", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002809", + "name": "红墙股份", + "tag": "化工", + "reason": "" + }, + { + "code": "603007", + "name": "ST花王", + "tag": "房地产", + "reason": "" + }, + { + "code": "002810", + "name": "山东赫达", + "tag": "化工", + "reason": "" + }, + { + "code": "300537", + "name": "广信材料", + "tag": "半导体", + "reason": "" + }, + { + "code": "300539", + "name": "横河精密", + "tag": "化工", + "reason": "" + }, + { + "code": "603090", + "name": "宏盛股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "838163", + "name": "方大新材", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603843", + "name": "正平股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "838810", + "name": "春光药装", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002811", + "name": "郑中设计", + "tag": "房地产", + "reason": "" + }, + { + "code": "601163", + "name": "三角轮胎", + "tag": "化工", + "reason": "" + }, + { + "code": "300541", + "name": "先进数通", + "tag": "AI", + "reason": "" + }, + { + "code": "603189", + "name": "网达软件", + "tag": "AI", + "reason": "" + }, + { + "code": "835179", + "name": "凯德石英", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "601500", + "name": "通用股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300536", + "name": "农尚环境", + "tag": "房地产", + "reason": "" + }, + { + "code": "603738", + "name": "泰晶科技", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300546", + "name": "雄帝科技", + "tag": "AI", + "reason": "" + }, + { + "code": 
"300549", + "name": "优德精密", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300547", + "name": "川环科技", + "tag": "化工", + "reason": "" + }, + { + "code": "603887", + "name": "城地香江", + "tag": "AI", + "reason": "" + }, + { + "code": "603421", + "name": "鼎信通讯", + "tag": "AI", + "reason": "" + }, + { + "code": "002813", + "name": "路畅科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "002815", + "name": "崇达技术", + "tag": "消费电子", + "reason": "" + }, + { + "code": "603313", + "name": "梦百合", + "tag": "房地产", + "reason": "" + }, + { + "code": "603816", + "name": "顾家家居", + "tag": "房地产", + "reason": "" + }, + { + "code": "836957", + "name": "汉维科技", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300551", + "name": "古鳌科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300555", + "name": "ST路通", + "tag": "AI", + "reason": "" + }, + { + "code": "300550", + "name": "和仁科技", + "tag": "AI", + "reason": "" + }, + { + "code": "603859", + "name": "能科科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300553", + "name": "集智股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300552", + "name": "万集科技", + "tag": "AI", + "reason": "" + }, + { + "code": "833075", + "name": "柏星龙", + "tag": "大消费", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603667", + "name": "五洲新春", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002816", + "name": "*ST和科", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603258", + "name": "电魂网络", + "tag": "AI", + "reason": "" + }, + { + "code": "835207", + "name": "众诚科技", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603716", + "name": "塞力医疗", + "tag": "医药", + "reason": "" + }, + { + "code": "300559", + "name": "佳发教育", + "tag": "AI", + "reason": "" + }, + { + "code": "300560", + "name": "中富通", + "tag": "AI", + "reason": "" + }, + { + "code": "836077", + "name": "吉林碳谷", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "601882", + "name": "海天精工", + "tag": "智能机器", + 
"reason": "" + }, + { + "code": "839719", + "name": "宁新新材", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603556", + "name": "海兴电力", + "tag": "电力", + "reason": "" + }, + { + "code": "603977", + "name": "国泰集团", + "tag": "化工", + "reason": "" + }, + { + "code": "300563", + "name": "神宇股份", + "tag": "AI", + "reason": "" + }, + { + "code": "300566", + "name": "激智科技", + "tag": "VR", + "reason": "" + }, + { + "code": "300561", + "name": "汇金科技", + "tag": "AI", + "reason": "" + }, + { + "code": "002821", + "name": "凯莱英", + "tag": "医药", + "reason": "" + }, + { + "code": "603727", + "name": "博迈科", + "tag": "公用", + "reason": "" + }, + { + "code": "835670", + "name": "数字人", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "835857", + "name": "百甲科技", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603900", + "name": "莱绅通灵", + "tag": "大消费", + "reason": "" + }, + { + "code": "603336", + "name": "宏辉果蔬", + "tag": "农业", + "reason": "" + }, + { + "code": "603819", + "name": "神力股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300569", + "name": "天能重工", + "tag": "赛道", + "reason": "" + }, + { + "code": "839946", + "name": "华阳变速", + "tag": "汽车", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002825", + "name": "纳尔股份", + "tag": "化工", + "reason": "" + }, + { + "code": "836504", + "name": "博迅生物", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "836149", + "name": "旭杰科技", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "836395", + "name": "朗鸿科技", + "tag": "消费电子", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603319", + "name": "湘油泵", + "tag": "汽车", + "reason": "" + }, + { + "code": "603660", + "name": "苏州科达", + "tag": "AI", + "reason": "" + }, + { + "code": "603559", + "name": "ST通脉", + "tag": "AI", + "reason": "" + }, + { + "code": "836414", + "name": "欧普泰", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": 
"300572", + "name": "安车检测", + "tag": "专业服务", + "reason": "" + }, + { + "code": "603033", + "name": "三维股份", + "tag": "化工", + "reason": "" + }, + { + "code": "002828", + "name": "贝肯能源", + "tag": "公用", + "reason": "" + }, + { + "code": "300573", + "name": "兴齐眼药", + "tag": "医药", + "reason": "" + }, + { + "code": "603036", + "name": "如通股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002827", + "name": "高争民爆", + "tag": "化工", + "reason": "" + }, + { + "code": "002830", + "name": "名雕股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "603585", + "name": "苏利股份", + "tag": "农业", + "reason": "" + }, + { + "code": "603389", + "name": "亚振家居", + "tag": "房地产", + "reason": "" + }, + { + "code": "603098", + "name": "森特股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "002831", + "name": "裕同科技", + "tag": "大消费", + "reason": "" + }, + { + "code": "603878", + "name": "武进不锈", + "tag": "房地产", + "reason": "" + }, + { + "code": "300581", + "name": "晨曦航空", + "tag": "军工", + "reason": "" + }, + { + "code": "300576", + "name": "容大感光", + "tag": "半导体", + "reason": "" + }, + { + "code": "300575", + "name": "中旗股份", + "tag": "农业", + "reason": "" + }, + { + "code": "300577", + "name": "开润股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "835368", + "name": "连城数控", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603058", + "name": "永吉股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "002832", + "name": "比音勒芬", + "tag": "大消费", + "reason": "" + }, + { + "code": "838030", + "name": "德众汽车", + "tag": "汽车", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603298", + "name": "杭叉集团", + "tag": "房地产", + "reason": "" + }, + { + "code": "603218", + "name": "日月股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "002833", + "name": "弘亚数控", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002835", + "name": "同为股份", + "tag": "AI", + "reason": "" + }, + { + "code": "002836", + "name": "新宏泽", + "tag": "大消费", + "reason": "" + }, + { + "code": "002837", 
+ "name": "英维克", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603239", + "name": "浙江仙通", + "tag": "汽车", + "reason": "" + }, + { + "code": "603929", + "name": "亚翔集成", + "tag": "房地产", + "reason": "" + }, + { + "code": "603444", + "name": "吉比特", + "tag": "AI", + "reason": "" + }, + { + "code": "603035", + "name": "常熟汽饰", + "tag": "汽车", + "reason": "" + }, + { + "code": "300588", + "name": "熙菱信息", + "tag": "AI", + "reason": "" + }, + { + "code": "603228", + "name": "景旺电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300583", + "name": "赛托生物", + "tag": "医药", + "reason": "" + }, + { + "code": "002838", + "name": "道恩股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300591", + "name": "万里马", + "tag": "大消费", + "reason": "" + }, + { + "code": "603579", + "name": "荣泰健康", + "tag": "大消费", + "reason": "" + }, + { + "code": "300590", + "name": "移为通信", + "tag": "AI", + "reason": "" + }, + { + "code": "603639", + "name": "海利尔", + "tag": "农业", + "reason": "" + }, + { + "code": "300584", + "name": "海辰药业", + "tag": "医药", + "reason": "" + }, + { + "code": "603039", + "name": "泛微网络", + "tag": "AI", + "reason": "" + }, + { + "code": "603690", + "name": "至纯科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300589", + "name": "江龙船艇", + "tag": "公用", + "reason": "" + }, + { + "code": "300593", + "name": "新雷能", + "tag": "赛道", + "reason": "" + }, + { + "code": "835985", + "name": "海泰新能", + "tag": "赛道", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603668", + "name": "天马科技", + "tag": "农业", + "reason": "" + }, + { + "code": "603165", + "name": "荣晟环保", + "tag": "大消费", + "reason": "" + }, + { + "code": "603337", + "name": "杰克股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002842", + "name": "翔鹭钨业", + "tag": "资源", + "reason": "" + }, + { + "code": "300596", + "name": "利安隆", + "tag": "化工", + "reason": "" + }, + { + "code": "603638", + "name": "艾迪精密", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603037", + "name": "凯众股份", + "tag": "汽车", + 
"reason": "" + }, + { + "code": "300592", + "name": "华凯易佰", + "tag": "房地产", + "reason": "" + }, + { + "code": "002843", + "name": "泰嘉股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "833455", + "name": "汇隆活塞", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300599", + "name": "雄塑科技", + "tag": "房地产", + "reason": "" + }, + { + "code": "603429", + "name": "集友股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "603358", + "name": "华达科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "603966", + "name": "法兰泰克", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002845", + "name": "同兴达", + "tag": "VR", + "reason": "" + }, + { + "code": "300578", + "name": "会畅通讯", + "tag": "AI", + "reason": "" + }, + { + "code": "300600", + "name": "国瑞科技", + "tag": "公用", + "reason": "" + }, + { + "code": "603089", + "name": "正裕工业", + "tag": "汽车", + "reason": "" + }, + { + "code": "300603", + "name": "立昂技术", + "tag": "AI", + "reason": "" + }, + { + "code": "603677", + "name": "奇精机械", + "tag": "大消费", + "reason": "" + }, + { + "code": "603360", + "name": "百傲化学", + "tag": "农业", + "reason": "" + }, + { + "code": "603177", + "name": "德创环保", + "tag": "公用", + "reason": "" + }, + { + "code": "603637", + "name": "镇海股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "300605", + "name": "恒锋信息", + "tag": "AI", + "reason": "" + }, + { + "code": "300606", + "name": "金太阳", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603208", + "name": "江山欧派", + "tag": "房地产", + "reason": "" + }, + { + "code": "603615", + "name": "茶花股份", + "tag": "化工", + "reason": "" + }, + { + "code": "603330", + "name": "天洋新材", + "tag": "化工", + "reason": "" + }, + { + "code": "002848", + "name": "高斯贝尔", + "tag": "大消费", + "reason": "" + }, + { + "code": "603839", + "name": "安正时尚", + "tag": "大消费", + "reason": "" + }, + { + "code": "300615", + "name": "欣天科技", + "tag": "AI", + "reason": "" + }, + { + "code": "603603", + "name": "*ST博天", + "tag": "公用", + "reason": "" + }, + { + "code": 
"002849", + "name": "威星智能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603817", + "name": "海峡环保", + "tag": "公用", + "reason": "" + }, + { + "code": "300611", + "name": "美力科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "833533", + "name": "骏创科技", + "tag": "汽车", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "600939", + "name": "重庆建工", + "tag": "房地产", + "reason": "" + }, + { + "code": "603238", + "name": "诺邦股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "300617", + "name": "安靠智电", + "tag": "电力", + "reason": "" + }, + { + "code": "603138", + "name": "海量数据", + "tag": "AI", + "reason": "" + }, + { + "code": "603578", + "name": "三星新材", + "tag": "大消费", + "reason": "" + }, + { + "code": "002851", + "name": "麦格米特", + "tag": "赛道", + "reason": "" + }, + { + "code": "836942", + "name": "恒立钻具", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603908", + "name": "牧高笛", + "tag": "大消费", + "reason": "" + }, + { + "code": "300616", + "name": "尚品宅配", + "tag": "房地产", + "reason": "" + }, + { + "code": "603991", + "name": "至正股份", + "tag": "化工", + "reason": "" + }, + { + "code": "002853", + "name": "皮阿诺", + "tag": "房地产", + "reason": "" + }, + { + "code": "603630", + "name": "拉芳家化", + "tag": "大消费", + "reason": "" + }, + { + "code": "603903", + "name": "中持股份", + "tag": "公用", + "reason": "" + }, + { + "code": "300621", + "name": "维业股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "603179", + "name": "新泉股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "300625", + "name": "三雄极光", + "tag": "大消费", + "reason": "" + }, + { + "code": "300628", + "name": "亿联网络", + "tag": "AI", + "reason": "" + }, + { + "code": "002856", + "name": "美芝股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "300626", + "name": "华瑞股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300627", + "name": "华测导航", + "tag": "AI", + "reason": "" + }, + { + "code": "002857", + "name": "三晖电气", + "tag": "电力", + "reason": "" + }, + { + "code": "603768", + 
"name": "常青股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "603388", + "name": "元成股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "002774", + "name": "快意电梯", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300629", + "name": "新劲刚", + "tag": "AI", + "reason": "" + }, + { + "code": "603833", + "name": "欧派家居", + "tag": "房地产", + "reason": "" + }, + { + "code": "603178", + "name": "圣龙股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "300630", + "name": "普利制药", + "tag": "医药", + "reason": "" + }, + { + "code": "601228", + "name": "广州港", + "tag": "公用", + "reason": "" + }, + { + "code": "832000", + "name": "安徽凤凰", + "tag": "汽车", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603041", + "name": "美思德", + "tag": "化工", + "reason": "" + }, + { + "code": "601200", + "name": "上海环境", + "tag": "公用", + "reason": "" + }, + { + "code": "300636", + "name": "同和药业", + "tag": "医药", + "reason": "" + }, + { + "code": "300635", + "name": "中达安", + "tag": "房地产", + "reason": "" + }, + { + "code": "603385", + "name": "惠达卫浴", + "tag": "房地产", + "reason": "" + }, + { + "code": "603586", + "name": "金麒麟", + "tag": "汽车", + "reason": "" + }, + { + "code": "300632", + "name": "光莆股份", + "tag": "VR", + "reason": "" + }, + { + "code": "603538", + "name": "美诺华", + "tag": "医药", + "reason": "" + }, + { + "code": "603078", + "name": "江化微", + "tag": "半导体", + "reason": "" + }, + { + "code": "838924", + "name": "广脉科技", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "839792", + "name": "东和新材", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002862", + "name": "实丰文化", + "tag": "传媒", + "reason": "" + }, + { + "code": "300637", + "name": "扬帆新材", + "tag": "半导体", + "reason": "" + }, + { + "code": "002860", + "name": "星帅尔", + "tag": "大消费", + "reason": "" + }, + { + "code": "603797", + "name": "联泰环保", + "tag": "公用", + "reason": "" + }, + { + "code": "300638", + "name": "广和通", + "tag": "AI", + "reason": "" + }, + { + "code": "603050", + 
"name": "科林电气", + "tag": "电力", + "reason": "" + }, + { + "code": "300640", + "name": "德艺文创", + "tag": "大消费", + "reason": "" + }, + { + "code": "837242", + "name": "建邦科技", + "tag": "大消费", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603225", + "name": "新凤鸣", + "tag": "化工", + "reason": "" + }, + { + "code": "002863", + "name": "今飞凯达", + "tag": "汽车", + "reason": "" + }, + { + "code": "603803", + "name": "瑞斯康达", + "tag": "AI", + "reason": "" + }, + { + "code": "603081", + "name": "大丰实业", + "tag": "房地产", + "reason": "" + }, + { + "code": "603232", + "name": "格尔软件", + "tag": "AI", + "reason": "" + }, + { + "code": "300554", + "name": "三超新材", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300645", + "name": "正元智慧", + "tag": "AI", + "reason": "" + }, + { + "code": "002865", + "name": "钧达股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "603920", + "name": "世运电路", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300514", + "name": "友讯达", + "tag": "电力", + "reason": "" + }, + { + "code": "603787", + "name": "新日股份", + "tag": "公用", + "reason": "" + }, + { + "code": "002867", + "name": "周大生", + "tag": "大消费", + "reason": "" + }, + { + "code": "603320", + "name": "迪贝电气", + "tag": "智能机器", + "reason": "" + }, + { + "code": "838227", + "name": "美登科技", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603505", + "name": "金石资源", + "tag": "公用", + "reason": "" + }, + { + "code": "300650", + "name": "太龙股份", + "tag": "VR", + "reason": "" + }, + { + "code": "603985", + "name": "恒润股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "300649", + "name": "杭州园林", + "tag": "房地产", + "reason": "" + }, + { + "code": "300643", + "name": "万通智控", + "tag": "汽车", + "reason": "" + }, + { + "code": "603229", + "name": "奥翔药业", + "tag": "医药", + "reason": "" + }, + { + "code": "603728", + "name": "鸣志电器", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300651", + "name": "金陵体育", + "tag": "大消费", + "reason": "" + }, + { + "code": "603926", + "name": "铁流股份", + 
"tag": "汽车", + "reason": "" + }, + { + "code": "837592", + "name": "华信永道", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603113", + "name": "金能科技", + "tag": "化工", + "reason": "" + }, + { + "code": "603086", + "name": "先达股份", + "tag": "农业", + "reason": "" + }, + { + "code": "002871", + "name": "伟隆股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603180", + "name": "金牌厨柜", + "tag": "房地产", + "reason": "" + }, + { + "code": "601952", + "name": "苏垦农发", + "tag": "农业", + "reason": "" + }, + { + "code": "002870", + "name": "香山股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "603488", + "name": "展鹏科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300652", + "name": "雷迪克", + "tag": "汽车", + "reason": "" + }, + { + "code": "603269", + "name": "海鸥股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603758", + "name": "秦安股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "603197", + "name": "保隆科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "300656", + "name": "民德电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "835305", + "name": "云创数据", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603383", + "name": "顶点软件", + "tag": "AI", + "reason": "" + }, + { + "code": "603855", + "name": "华荣股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603580", + "name": "艾艾精工", + "tag": "化工", + "reason": "" + }, + { + "code": "300659", + "name": "中孚信息", + "tag": "AI", + "reason": "" + }, + { + "code": "603042", + "name": "华脉科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300658", + "name": "延江股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "300660", + "name": "江苏雷利", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002878", + "name": "元隆雅图", + "tag": "专业服务", + "reason": "" + }, + { + "code": "834770", + "name": "艾能聚", + "tag": "赛道", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300665", + "name": "飞鹿股份", + "tag": "化工", + "reason": "" + }, + { + "code": "603879", + "name": 
"永悦科技", + "tag": "化工", + "reason": "" + }, + { + "code": "603226", + "name": "菲林格尔", + "tag": "房地产", + "reason": "" + }, + { + "code": "603326", + "name": "我乐家居", + "tag": "房地产", + "reason": "" + }, + { + "code": "834407", + "name": "驰诚股份", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603316", + "name": "诚邦股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "300667", + "name": "必创科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300668", + "name": "杰恩设计", + "tag": "房地产", + "reason": "" + }, + { + "code": "836260", + "name": "中寰股份", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "838971", + "name": "天马新材", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603286", + "name": "日盈电子", + "tag": "汽车", + "reason": "" + }, + { + "code": "002886", + "name": "沃特股份", + "tag": "化工", + "reason": "" + }, + { + "code": "871642", + "name": "通易航天", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603938", + "name": "三孚股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300669", + "name": "沪宁股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603801", + "name": "志邦家居", + "tag": "房地产", + "reason": "" + }, + { + "code": "834014", + "name": "特瑞斯", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300670", + "name": "大烨智能", + "tag": "电力", + "reason": "" + }, + { + "code": "603331", + "name": "百达精工", + "tag": "智能机器", + "reason": "" + }, + { + "code": "833427", + "name": "华维设计", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002879", + "name": "长缆科技", + "tag": "电力", + "reason": "" + }, + { + "code": "603305", + "name": "旭升集团", + "tag": "汽车", + "reason": "" + }, + { + "code": "603757", + "name": "大元泵业", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300673", + "name": "佩蒂股份", + "tag": "农业", + "reason": "" + }, + { + "code": "002884", + "name": "凌霄泵业", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603612", + 
"name": "索通发展", + "tag": "化工", + "reason": "" + }, + { + "code": "603707", + "name": "健友股份", + "tag": "医药", + "reason": "" + }, + { + "code": "300675", + "name": "建科院", + "tag": "房地产", + "reason": "" + }, + { + "code": "603676", + "name": "卫信康", + "tag": "医药", + "reason": "" + }, + { + "code": "300680", + "name": "隆盛科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "300681", + "name": "英搏尔", + "tag": "汽车", + "reason": "" + }, + { + "code": "603730", + "name": "岱美股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "603063", + "name": "禾望电气", + "tag": "赛道", + "reason": "" + }, + { + "code": "603233", + "name": "大参林", + "tag": "医药", + "reason": "" + }, + { + "code": "002889", + "name": "东方嘉盛", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "603357", + "name": "设计总院", + "tag": "房地产", + "reason": "" + }, + { + "code": "300682", + "name": "朗新集团", + "tag": "AI", + "reason": "" + }, + { + "code": "002887", + "name": "绿茵生态", + "tag": "公用", + "reason": "" + }, + { + "code": "002890", + "name": "弘宇股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300687", + "name": "赛意信息", + "tag": "AI", + "reason": "" + }, + { + "code": "603535", + "name": "嘉诚国际", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "300690", + "name": "双一科技", + "tag": "赛道", + "reason": "" + }, + { + "code": "603458", + "name": "勘设股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "300689", + "name": "澄天伟业", + "tag": "AI", + "reason": "" + }, + { + "code": "603602", + "name": "纵横通信", + "tag": "AI", + "reason": "" + }, + { + "code": "300688", + "name": "创业黑马", + "tag": "教育", + "reason": "" + }, + { + "code": "601326", + "name": "秦港股份", + "tag": "公用", + "reason": "" + }, + { + "code": "835892", + "name": "中科美菱", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603776", + "name": "永安行", + "tag": "专业服务", + "reason": "" + }, + { + "code": "603129", + "name": "春风动力", + "tag": "公用", + "reason": "" + }, + { + "code": "603557", + "name": "ST起步", + "tag": "大消费", + 
"reason": "" + }, + { + "code": "002891", + "name": "中宠股份", + "tag": "农业", + "reason": "" + }, + { + "code": "300692", + "name": "中环环保", + "tag": "公用", + "reason": "" + }, + { + "code": "300696", + "name": "爱乐达", + "tag": "军工", + "reason": "" + }, + { + "code": "300693", + "name": "盛弘股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "603181", + "name": "皇马科技", + "tag": "化工", + "reason": "" + }, + { + "code": "002896", + "name": "中大力德", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300698", + "name": "万马科技", + "tag": "AI", + "reason": "" + }, + { + "code": "603359", + "name": "东珠生态", + "tag": "公用", + "reason": "" + }, + { + "code": "300699", + "name": "光威复材", + "tag": "化工", + "reason": "" + }, + { + "code": "603500", + "name": "祥和实业", + "tag": "公用", + "reason": "" + }, + { + "code": "603183", + "name": "建研院", + "tag": "专业服务", + "reason": "" + }, + { + "code": "836892", + "name": "广咨国际", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603725", + "name": "天安新材", + "tag": "化工", + "reason": "" + }, + { + "code": "603882", + "name": "金域医学", + "tag": "医药", + "reason": "" + }, + { + "code": "300695", + "name": "兆丰股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "603277", + "name": "银都股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603386", + "name": "骏亚科技", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300700", + "name": "岱勒新材", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603321", + "name": "梅轮电梯", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002899", + "name": "英派斯", + "tag": "大消费", + "reason": "" + }, + { + "code": "300701", + "name": "森霸传感", + "tag": "VR", + "reason": "" + }, + { + "code": "002893", + "name": "京能热力", + "tag": "公用", + "reason": "" + }, + { + "code": "834599", + "name": "同力股份", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603813", + "name": "原尚股份", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "300703", + "name": "创源股份", + "tag": "大消费", + "reason": "" 
+ }, + { + "code": "300702", + "name": "天宇股份", + "tag": "医药", + "reason": "" + }, + { + "code": "603055", + "name": "台华新材", + "tag": "大消费", + "reason": "" + }, + { + "code": "603363", + "name": "傲农生物", + "tag": "农业", + "reason": "" + }, + { + "code": "603136", + "name": "天目湖", + "tag": "大消费", + "reason": "" + }, + { + "code": "603378", + "name": "亚士创能", + "tag": "房地产", + "reason": "" + }, + { + "code": "603367", + "name": "辰欣药业", + "tag": "医药", + "reason": "" + }, + { + "code": "300705", + "name": "九典制药", + "tag": "医药", + "reason": "" + }, + { + "code": "603110", + "name": "东方材料", + "tag": "化工", + "reason": "" + }, + { + "code": "002906", + "name": "华阳集团", + "tag": "汽车", + "reason": "" + }, + { + "code": "300708", + "name": "聚灿光电", + "tag": "VR", + "reason": "" + }, + { + "code": "603829", + "name": "洛凯股份", + "tag": "电力", + "reason": "" + }, + { + "code": "300710", + "name": "万隆光电", + "tag": "AI", + "reason": "" + }, + { + "code": "603683", + "name": "晶华新材", + "tag": "化工", + "reason": "" + }, + { + "code": "002908", + "name": "德生科技", + "tag": "AI", + "reason": "" + }, + { + "code": "603922", + "name": "金鸿顺", + "tag": "汽车", + "reason": "" + }, + { + "code": "603607", + "name": "京华激光", + "tag": "大消费", + "reason": "" + }, + { + "code": "300715", + "name": "凯伦股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "002909", + "name": "集泰股份", + "tag": "化工", + "reason": "" + }, + { + "code": "603260", + "name": "合盛硅业", + "tag": "化工", + "reason": "" + }, + { + "code": "300712", + "name": "永福股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "603912", + "name": "佳力图", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300711", + "name": "广哈通信", + "tag": "AI", + "reason": "" + }, + { + "code": "300713", + "name": "英可瑞", + "tag": "赛道", + "reason": "" + }, + { + "code": "603856", + "name": "东宏股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "603507", + "name": "振江股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "300720", + "name": "海川智能", + "tag": "智能机器", + 
"reason": "" + }, + { + "code": "300717", + "name": "华信新材", + "tag": "化工", + "reason": "" + }, + { + "code": "300718", + "name": "长盛轴承", + "tag": "房地产", + "reason": "" + }, + { + "code": "300716", + "name": "泉为科技", + "tag": "化工", + "reason": "" + }, + { + "code": "300719", + "name": "安达维尔", + "tag": "军工", + "reason": "" + }, + { + "code": "603916", + "name": "苏博特", + "tag": "化工", + "reason": "" + }, + { + "code": "300722", + "name": "新余国科", + "tag": "军工", + "reason": "" + }, + { + "code": "603076", + "name": "乐惠国际", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603278", + "name": "大业股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "839790", + "name": "联迪信息", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603605", + "name": "珀莱雅", + "tag": "大消费", + "reason": "" + }, + { + "code": "300721", + "name": "怡达股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300723", + "name": "一品红", + "tag": "医药", + "reason": "" + }, + { + "code": "600933", + "name": "爱柯迪", + "tag": "汽车", + "reason": "" + }, + { + "code": "603619", + "name": "中曼石油", + "tag": "公用", + "reason": "" + }, + { + "code": "603365", + "name": "水星家纺", + "tag": "大消费", + "reason": "" + }, + { + "code": "300726", + "name": "宏达电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "603685", + "name": "晨丰科技", + "tag": "VR", + "reason": "" + }, + { + "code": "300727", + "name": "润禾材料", + "tag": "化工", + "reason": "" + }, + { + "code": "836221", + "name": "易实精密", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "870866", + "name": "绿亨科技", + "tag": "农业", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603809", + "name": "豪能股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "871970", + "name": "大禹生物", + "tag": "农业", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603848", + "name": "好太太", + "tag": "大消费", + "reason": "" + }, + { + "code": "002913", + "name": "奥士康", + "tag": "消费电子", + "reason": "" + }, + { + "code": "603917", + "name": 
"合力科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "300730", + "name": "科创信息", + "tag": "AI", + "reason": "" + }, + { + "code": "300731", + "name": "科创新源", + "tag": "化工", + "reason": "" + }, + { + "code": "300732", + "name": "设研院", + "tag": "房地产", + "reason": "" + }, + { + "code": "837663", + "name": "明阳科技", + "tag": "汽车", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "872351", + "name": "华光源海", + "tag": "统一大市场", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603477", + "name": "巨星农牧", + "tag": "农业", + "reason": "" + }, + { + "code": "837006", + "name": "晟楠科技", + "tag": "军工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002918", + "name": "蒙娜丽莎", + "tag": "房地产", + "reason": "" + }, + { + "code": "002920", + "name": "德赛西威", + "tag": "汽车", + "reason": "" + }, + { + "code": "838701", + "name": "豪声电子", + "tag": "消费电子", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002921", + "name": "联诚精密", + "tag": "汽车", + "reason": "" + }, + { + "code": "870976", + "name": "视声智能", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603655", + "name": "朗博科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "002922", + "name": "伊戈尔", + "tag": "消费电子", + "reason": "" + }, + { + "code": "603161", + "name": "科华控股", + "tag": "汽车", + "reason": "" + }, + { + "code": "300664", + "name": "鹏鹞环保", + "tag": "公用", + "reason": "" + }, + { + "code": "300736", + "name": "百邦科技", + "tag": "AI", + "reason": "" + }, + { + "code": "603056", + "name": "德邦股份", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "300733", + "name": "西菱动力", + "tag": "汽车", + "reason": "" + }, + { + "code": "836675", + "name": "秉扬科技", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "830946", + "name": "森萱医药", + "tag": "医药", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603356", + "name": "华菱精工", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300737", + "name": "科顺股份", + "tag": "房地产", + "reason": "" + }, + { + 
"code": "603506", + "name": "南都物业", + "tag": "房地产", + "reason": "" + }, + { + "code": "300739", + "name": "明阳电路", + "tag": "消费电子", + "reason": "" + }, + { + "code": "603516", + "name": "淳中科技", + "tag": "AI", + "reason": "" + }, + { + "code": "603871", + "name": "嘉友国际", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "300644", + "name": "南京聚隆", + "tag": "化工", + "reason": "" + }, + { + "code": "603709", + "name": "中源家居", + "tag": "房地产", + "reason": "" + }, + { + "code": "002927", + "name": "泰永长征", + "tag": "电力", + "reason": "" + }, + { + "code": "870436", + "name": "大地电气", + "tag": "汽车", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603712", + "name": "七一二", + "tag": "AI", + "reason": "" + }, + { + "code": "603680", + "name": "今创集团", + "tag": "公用", + "reason": "" + }, + { + "code": "600901", + "name": "江苏金租", + "tag": "金融", + "reason": "" + }, + { + "code": "603059", + "name": "倍加洁", + "tag": "大消费", + "reason": "" + }, + { + "code": "002928", + "name": "华夏航空", + "tag": "公用", + "reason": "" + }, + { + "code": "833943", + "name": "优机股份", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "836717", + "name": "瑞星股份", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300634", + "name": "彩讯股份", + "tag": "AI", + "reason": "" + }, + { + "code": "600929", + "name": "雪天盐业", + "tag": "化工", + "reason": "" + }, + { + "code": "002930", + "name": "宏川智慧", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "838670", + "name": "恒进感应", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300504", + "name": "天邑股份", + "tag": "AI", + "reason": "" + }, + { + "code": "603773", + "name": "沃格光电", + "tag": "VR", + "reason": "" + }, + { + "code": "871396", + "name": "常辅股份", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603733", + "name": "仙鹤股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "603348", + "name": "文灿股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "603596", 
+ "name": "伯特利", + "tag": "汽车", + "reason": "" + }, + { + "code": "300743", + "name": "天地数码", + "tag": "化工", + "reason": "" + }, + { + "code": "603259", + "name": "药明康德", + "tag": "医药", + "reason": "" + }, + { + "code": "300742", + "name": "*ST越博", + "tag": "汽车", + "reason": "" + }, + { + "code": "300454", + "name": "深信服", + "tag": "AI", + "reason": "" + }, + { + "code": "300745", + "name": "欣锐科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "300746", + "name": "汉嘉设计", + "tag": "房地产", + "reason": "" + }, + { + "code": "601330", + "name": "绿色动力", + "tag": "公用", + "reason": "" + }, + { + "code": "603666", + "name": "亿嘉和", + "tag": "智能机器", + "reason": "" + }, + { + "code": "830896", + "name": "旺成科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603587", + "name": "地素时尚", + "tag": "大消费", + "reason": "" + }, + { + "code": "603650", + "name": "彤程新材", + "tag": "化工", + "reason": "" + }, + { + "code": "603105", + "name": "芯能科技", + "tag": "赛道", + "reason": "" + }, + { + "code": "603713", + "name": "密尔克卫", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "601869", + "name": "长飞光纤", + "tag": "AI", + "reason": "" + }, + { + "code": "603657", + "name": "春光科技", + "tag": "大消费", + "reason": "" + }, + { + "code": "601606", + "name": "长城军工", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300724", + "name": "捷佳伟创", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603192", + "name": "汇得科技", + "tag": "化工", + "reason": "" + }, + { + "code": "002933", + "name": "新兴装备", + "tag": "军工", + "reason": "" + }, + { + "code": "603297", + "name": "永新光学", + "tag": "VR", + "reason": "" + }, + { + "code": "603790", + "name": "雅运股份", + "tag": "化工", + "reason": "" + }, + { + "code": "002938", + "name": "鹏鼎控股", + "tag": "消费电子", + "reason": "" + }, + { + "code": "603583", + "name": "捷昌驱动", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300748", + "name": "金力永磁", + "tag": "资源", + "reason": "" + }, + { + "code": "300749", + "name": "顶固集创", + "tag": "房地产", + 
"reason": "" + }, + { + "code": "300694", + "name": "蠡湖股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "002940", + "name": "昂利康", + "tag": "医药", + "reason": "" + }, + { + "code": "300674", + "name": "宇信科技", + "tag": "AI", + "reason": "" + }, + { + "code": "871553", + "name": "凯腾精工", + "tag": "大消费", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002941", + "name": "新疆交建", + "tag": "房地产", + "reason": "" + }, + { + "code": "603187", + "name": "海容冷链", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002943", + "name": "宇晶股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "839680", + "name": "广道数字", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300752", + "name": "隆利科技", + "tag": "VR", + "reason": "" + }, + { + "code": "833394", + "name": "民士达", + "tag": "大消费", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "838171", + "name": "邦德股份", + "tag": "汽车", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "832089", + "name": "禾昌聚合", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "837046", + "name": "亿能电力", + "tag": "电力", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "831855", + "name": "浙江大农", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603185", + "name": "弘元绿能", + "tag": "赛道", + "reason": "" + }, + { + "code": "300756", + "name": "金马游乐", + "tag": "大消费", + "reason": "" + }, + { + "code": "300757", + "name": "罗博特科", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603121", + "name": "华培动力", + "tag": "汽车", + "reason": "" + }, + { + "code": "872374", + "name": "云里物里", + "tag": "消费电子", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "870299", + "name": "灿能电力", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603332", + "name": "苏州龙杰", + "tag": "化工", + "reason": "" + }, + { + "code": "601298", + "name": "青岛港", + "tag": "公用", + "reason": "" + }, + { + "code": "603700", + "name": "宁水集团", + "tag": "智能机器", + 
"reason": "" + }, + { + "code": "601615", + "name": "明阳智能", + "tag": "赛道", + "reason": "" + }, + { + "code": "300759", + "name": "康龙化成", + "tag": "医药", + "reason": "" + }, + { + "code": "300755", + "name": "华致酒行", + "tag": "大消费", + "reason": "" + }, + { + "code": "603351", + "name": "威尔药业", + "tag": "医药", + "reason": "" + }, + { + "code": "601865", + "name": "福莱特", + "tag": "化工", + "reason": "" + }, + { + "code": "300761", + "name": "立华股份", + "tag": "农业", + "reason": "" + }, + { + "code": "603956", + "name": "威派格", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300758", + "name": "七彩化学", + "tag": "化工", + "reason": "" + }, + { + "code": "002949", + "name": "华阳国际", + "tag": "房地产", + "reason": "" + }, + { + "code": "300762", + "name": "上海瀚讯", + "tag": "AI", + "reason": "" + }, + { + "code": "002951", + "name": "ST金时", + "tag": "化工", + "reason": "" + }, + { + "code": "300766", + "name": "每日互动", + "tag": "AI", + "reason": "" + }, + { + "code": "603681", + "name": "永冠新材", + "tag": "化工", + "reason": "" + }, + { + "code": "300767", + "name": "震安科技", + "tag": "化工", + "reason": "" + }, + { + "code": "838275", + "name": "驱动力", + "tag": "农业", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300768", + "name": "迪普科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300771", + "name": "智莱科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300773", + "name": "拉卡拉", + "tag": "AI", + "reason": "" + }, + { + "code": "300772", + "name": "运达股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "603967", + "name": "中创物流", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "002953", + "name": "日丰股份", + "tag": "电力", + "reason": "" + }, + { + "code": "300778", + "name": "新城市", + "tag": "房地产", + "reason": "" + }, + { + "code": "603267", + "name": "鸿远电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "600989", + "name": "宝丰能源", + "tag": "化工", + "reason": "" + }, + { + "code": "300777", + "name": "中简科技", + "tag": "化工", + "reason": "" + }, + { + "code": "300776", + 
"name": "帝尔激光", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300775", + "name": "三角防务", + "tag": "军工", + "reason": "" + }, + { + "code": "603982", + "name": "泉峰汽车", + "tag": "汽车", + "reason": "" + }, + { + "code": "300779", + "name": "惠城环保", + "tag": "公用", + "reason": "" + }, + { + "code": "002955", + "name": "鸿合科技", + "tag": "VR", + "reason": "" + }, + { + "code": "872541", + "name": "铁大科技", + "tag": "公用", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300780", + "name": "德恩精工", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603915", + "name": "国茂股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603217", + "name": "元利科技", + "tag": "化工", + "reason": "" + }, + { + "code": "300594", + "name": "朗进科技", + "tag": "公用", + "reason": "" + }, + { + "code": "836807", + "name": "奔朗新材", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300785", + "name": "值得买", + "tag": "AI", + "reason": "" + }, + { + "code": "603236", + "name": "移远通信", + "tag": "AI", + "reason": "" + }, + { + "code": "603256", + "name": "宏和科技", + "tag": "化工", + "reason": "" + }, + { + "code": "688020", + "name": "方邦股份", + "tag": "消费电子", + "reason": "" + }, + { + "code": "688001", + "name": "华兴源创", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688066", + "name": "航天宏图", + "tag": "AI", + "reason": "" + }, + { + "code": "688333", + "name": "铂力特", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688011", + "name": "新光光电", + "tag": "VR", + "reason": "" + }, + { + "code": "688010", + "name": "福光股份", + "tag": "VR", + "reason": "" + }, + { + "code": "688028", + "name": "沃尔德", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688019", + "name": "安集科技", + "tag": "半导体", + "reason": "" + }, + { + "code": "688088", + "name": "虹软科技", + "tag": "AI", + "reason": "" + }, + { + "code": "688033", + "name": "天宜上佳", + "tag": "公用", + "reason": "" + }, + { + "code": "300786", + "name": "国林科技", + "tag": "公用", + "reason": "" + }, + { + "code": "603687", + "name": "大胜达", + 
"tag": "化工", + "reason": "" + }, + { + "code": "603279", + "name": "景津装备", + "tag": "公用", + "reason": "" + }, + { + "code": "603613", + "name": "国联股份", + "tag": "AI", + "reason": "" + }, + { + "code": "603662", + "name": "柯力传感", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688188", + "name": "柏楚电子", + "tag": "AI", + "reason": "" + }, + { + "code": "002960", + "name": "青鸟消防", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002959", + "name": "小熊电器", + "tag": "大消费", + "reason": "" + }, + { + "code": "603992", + "name": "松霖科技", + "tag": "房地产", + "reason": "" + }, + { + "code": "300789", + "name": "唐源电气", + "tag": "AI", + "reason": "" + }, + { + "code": "002961", + "name": "瑞达期货", + "tag": "金融", + "reason": "" + }, + { + "code": "688168", + "name": "安博通", + "tag": "AI", + "reason": "" + }, + { + "code": "300790", + "name": "宇瞳光学", + "tag": "VR", + "reason": "" + }, + { + "code": "688030", + "name": "山石网科", + "tag": "AI", + "reason": "" + }, + { + "code": "603786", + "name": "科博达", + "tag": "汽车", + "reason": "" + }, + { + "code": "603815", + "name": "交建股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "002965", + "name": "祥鑫科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688098", + "name": "申联生物", + "tag": "农业", + "reason": "" + }, + { + "code": "002963", + "name": "豪尔赛", + "tag": "房地产", + "reason": "" + }, + { + "code": "300799", + "name": "*ST左江", + "tag": "AI", + "reason": "" + }, + { + "code": "688369", + "name": "致远互联", + "tag": "AI", + "reason": "" + }, + { + "code": "688058", + "name": "宝兰德", + "tag": "AI", + "reason": "" + }, + { + "code": "688023", + "name": "安恒信息", + "tag": "AI", + "reason": "" + }, + { + "code": "688202", + "name": "美迪西", + "tag": "医药", + "reason": "" + }, + { + "code": "688199", + "name": "久日新材", + "tag": "化工", + "reason": "" + }, + { + "code": "688299", + "name": "长阳科技", + "tag": "VR", + "reason": "" + }, + { + "code": "688021", + "name": "奥福环保", + "tag": "公用", + "reason": "" + }, + { + "code": "688288", + "name": 
"鸿泉物联", + "tag": "AI", + "reason": "" + }, + { + "code": "300800", + "name": "力合科技", + "tag": "公用", + "reason": "" + }, + { + "code": "002967", + "name": "广电计量", + "tag": "专业服务", + "reason": "" + }, + { + "code": "300564", + "name": "筑博设计", + "tag": "房地产", + "reason": "" + }, + { + "code": "603489", + "name": "八方股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688101", + "name": "三达膜", + "tag": "公用", + "reason": "" + }, + { + "code": "688300", + "name": "联瑞新材", + "tag": "化工", + "reason": "" + }, + { + "code": "300796", + "name": "贝斯美", + "tag": "农业", + "reason": "" + }, + { + "code": "688111", + "name": "金山办公", + "tag": "AI", + "reason": "" + }, + { + "code": "300803", + "name": "指南针", + "tag": "AI", + "reason": "" + }, + { + "code": "688196", + "name": "卓越新能", + "tag": "化工", + "reason": "" + }, + { + "code": "300806", + "name": "斯迪克", + "tag": "化工", + "reason": "" + }, + { + "code": "300808", + "name": "久量股份", + "tag": "VR", + "reason": "" + }, + { + "code": "688310", + "name": "迈得医疗", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002968", + "name": "新大正", + "tag": "房地产", + "reason": "" + }, + { + "code": "688357", + "name": "建龙微纳", + "tag": "化工", + "reason": "" + }, + { + "code": "688118", + "name": "普元信息", + "tag": "AI", + "reason": "" + }, + { + "code": "300809", + "name": "华辰装备", + "tag": "智能机器", + "reason": "" + }, + { + "code": "831906", + "name": "舜宇精工", + "tag": "汽车", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "688258", + "name": "卓易信息", + "tag": "AI", + "reason": "" + }, + { + "code": "688039", + "name": "当虹科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300807", + "name": "天迈科技", + "tag": "AI", + "reason": "" + }, + { + "code": "603995", + "name": "甬金股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "002972", + "name": "科安达", + "tag": "公用", + "reason": "" + }, + { + "code": "688078", + "name": "龙软科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300811", + "name": "铂科新材", + "tag": "资源", + "reason": "" + }, + { 
+ "code": "603109", + "name": "神驰机电", + "tag": "汽车", + "reason": "" + }, + { + "code": "688081", + "name": "兴图新科", + "tag": "AI", + "reason": "" + }, + { + "code": "002973", + "name": "侨银股份", + "tag": "公用", + "reason": "" + }, + { + "code": "688178", + "name": "万德斯", + "tag": "公用", + "reason": "" + }, + { + "code": "300813", + "name": "泰林生物", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603551", + "name": "奥普家居", + "tag": "大消费", + "reason": "" + }, + { + "code": "836871", + "name": "派特尔", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "688100", + "name": "威胜信息", + "tag": "AI", + "reason": "" + }, + { + "code": "688026", + "name": "洁特生物", + "tag": "化工", + "reason": "" + }, + { + "code": "688266", + "name": "泽璟制药-U", + "tag": "医药", + "reason": "" + }, + { + "code": "688159", + "name": "有方科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300815", + "name": "玉禾田", + "tag": "公用", + "reason": "" + }, + { + "code": "603195", + "name": "公牛集团", + "tag": "大消费", + "reason": "" + }, + { + "code": "300816", + "name": "艾可蓝", + "tag": "汽车", + "reason": "" + }, + { + "code": "688398", + "name": "赛特新材", + "tag": "化工", + "reason": "" + }, + { + "code": "688186", + "name": "广大特材", + "tag": "房地产", + "reason": "" + }, + { + "code": "688080", + "name": "映翰通", + "tag": "AI", + "reason": "" + }, + { + "code": "300818", + "name": "耐普矿机", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688208", + "name": "道通科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300820", + "name": "英杰电气", + "tag": "赛道", + "reason": "" + }, + { + "code": "300817", + "name": "双飞集团", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688169", + "name": "石头科技", + "tag": "大消费", + "reason": "" + }, + { + "code": "836208", + "name": "青矩技术", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603948", + "name": "建业股份", + "tag": "化工", + "reason": "" + }, + { + "code": "872392", + "name": "佳合科技", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + 
{ + "code": "831304", + "name": "迪尔化工", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603949", + "name": "雪龙集团", + "tag": "汽车", + "reason": "" + }, + { + "code": "300819", + "name": "聚杰微纤", + "tag": "大消费", + "reason": "" + }, + { + "code": "300821", + "name": "东岳硅材", + "tag": "化工", + "reason": "" + }, + { + "code": "002977", + "name": "天箭科技", + "tag": "AI", + "reason": "" + }, + { + "code": "837821", + "name": "则成电子", + "tag": "消费电子", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300823", + "name": "建科机械", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688051", + "name": "佳华科技", + "tag": "AI", + "reason": "" + }, + { + "code": "873339", + "name": "恒太照明", + "tag": "大消费", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603221", + "name": "爱丽家居", + "tag": "化工", + "reason": "" + }, + { + "code": "688228", + "name": "开普云", + "tag": "AI", + "reason": "" + }, + { + "code": "836720", + "name": "吉冈精密", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "839725", + "name": "惠丰钻石", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "688096", + "name": "京源环保", + "tag": "公用", + "reason": "" + }, + { + "code": "836961", + "name": "西磁科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300827", + "name": "上能电气", + "tag": "赛道", + "reason": "" + }, + { + "code": "834058", + "name": "华洋赛车", + "tag": "公用", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "838402", + "name": "硅烷科技", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002980", + "name": "华盛昌", + "tag": "智能机器", + "reason": "" + }, + { + "code": "002978", + "name": "安宁股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "871857", + "name": "泓禧科技", + "tag": "消费电子", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "872895", + "name": "花溪科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002982", + "name": "湘佳股份", + "tag": "农业", + 
"reason": "" + }, + { + "code": "871753", + "name": "天纺标", + "tag": "专业服务", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "688318", + "name": "财富趋势", + "tag": "AI", + "reason": "" + }, + { + "code": "002983", + "name": "芯瑞达", + "tag": "VR", + "reason": "" + }, + { + "code": "688365", + "name": "光云科技", + "tag": "AI", + "reason": "" + }, + { + "code": "002985", + "name": "北摩高科", + "tag": "军工", + "reason": "" + }, + { + "code": "300830", + "name": "金现代", + "tag": "AI", + "reason": "" + }, + { + "code": "002987", + "name": "京北方", + "tag": "AI", + "reason": "" + }, + { + "code": "688466", + "name": "金科环境", + "tag": "公用", + "reason": "" + }, + { + "code": "688588", + "name": "凌志软件", + "tag": "AI", + "reason": "" + }, + { + "code": "300833", + "name": "浩洋股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "605001", + "name": "威奥股份", + "tag": "公用", + "reason": "" + }, + { + "code": "002990", + "name": "盛视科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300835", + "name": "龙磁科技", + "tag": "资源", + "reason": "" + }, + { + "code": "603950", + "name": "长源东谷", + "tag": "汽车", + "reason": "" + }, + { + "code": "300836", + "name": "佰奥智能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "605288", + "name": "凯迪股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "870508", + "name": "丰安股份", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "002986", + "name": "宇新股份", + "tag": "化工", + "reason": "" + }, + { + "code": "601827", + "name": "三峰环境", + "tag": "公用", + "reason": "" + }, + { + "code": "300837", + "name": "浙矿股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688312", + "name": "燕麦科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300838", + "name": "浙江力诺", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688157", + "name": "松井股份", + "tag": "化工", + "reason": "" + }, + { + "code": "002989", + "name": "中天精装", + "tag": "房地产", + "reason": "" + }, + { + "code": "688004", + "name": "博汇科技", + "tag": "AI", + "reason": "" + 
}, + { + "code": "688106", + "name": "金宏气体", + "tag": "半导体", + "reason": "" + }, + { + "code": "605166", + "name": "聚合顺", + "tag": "化工", + "reason": "" + }, + { + "code": "688505", + "name": "复旦张江", + "tag": "医药", + "reason": "" + }, + { + "code": "300824", + "name": "北鼎股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "688528", + "name": "秦川物联", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688600", + "name": "皖仪科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300845", + "name": "捷安高科", + "tag": "AI", + "reason": "" + }, + { + "code": "688377", + "name": "迪威尔", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300840", + "name": "酷特智能", + "tag": "大消费", + "reason": "" + }, + { + "code": "688027", + "name": "国盾量子", + "tag": "AI", + "reason": "" + }, + { + "code": "688060", + "name": "云涌科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300849", + "name": "锦盛新材", + "tag": "大消费", + "reason": "" + }, + { + "code": "300852", + "name": "四会富仕", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300850", + "name": "新强联", + "tag": "赛道", + "reason": "" + }, + { + "code": "688309", + "name": "恒誉环保", + "tag": "公用", + "reason": "" + }, + { + "code": "605108", + "name": "同庆楼", + "tag": "大消费", + "reason": "" + }, + { + "code": "688500", + "name": "*ST慧辰", + "tag": "AI", + "reason": "" + }, + { + "code": "688579", + "name": "山大地纬", + "tag": "AI", + "reason": "" + }, + { + "code": "300851", + "name": "交大思诺", + "tag": "公用", + "reason": "" + }, + { + "code": "688069", + "name": "德林海", + "tag": "公用", + "reason": "" + }, + { + "code": "688077", + "name": "大地熊", + "tag": "资源", + "reason": "" + }, + { + "code": "688561", + "name": "奇安信-U", + "tag": "AI", + "reason": "" + }, + { + "code": "300856", + "name": "科思股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "871245", + "name": "威博液压", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603408", + "name": "建霖家居", + "tag": "房地产", + "reason": "" + }, + { + "code": "605222", + "name": 
"起帆电缆", + "tag": "电力", + "reason": "" + }, + { + "code": "688311", + "name": "盟升电子", + "tag": "AI", + "reason": "" + }, + { + "code": "605318", + "name": "法狮龙", + "tag": "房地产", + "reason": "" + }, + { + "code": "002992", + "name": "宝明科技", + "tag": "VR", + "reason": "" + }, + { + "code": "838262", + "name": "太湖雪", + "tag": "大消费", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300859", + "name": "*ST西域", + "tag": "大消费", + "reason": "" + }, + { + "code": "605066", + "name": "天正电气", + "tag": "电力", + "reason": "" + }, + { + "code": "688556", + "name": "高测股份", + "tag": "赛道", + "reason": "" + }, + { + "code": "688339", + "name": "亿华通-U", + "tag": "赛道", + "reason": "" + }, + { + "code": "605100", + "name": "华丰股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688313", + "name": "仕佳光子", + "tag": "AI", + "reason": "" + }, + { + "code": "688065", + "name": "凯赛生物", + "tag": "化工", + "reason": "" + }, + { + "code": "605088", + "name": "冠盛股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "688055", + "name": "龙腾光电", + "tag": "VR", + "reason": "" + }, + { + "code": "688335", + "name": "复洁环保", + "tag": "公用", + "reason": "" + }, + { + "code": "688229", + "name": "博睿数据", + "tag": "AI", + "reason": "" + }, + { + "code": "688519", + "name": "南亚新材", + "tag": "消费电子", + "reason": "" + }, + { + "code": "605333", + "name": "沪光股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "603931", + "name": "格林达", + "tag": "半导体", + "reason": "" + }, + { + "code": "688379", + "name": "华光新材", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688596", + "name": "正帆科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "605008", + "name": "长鸿高科", + "tag": "化工", + "reason": "" + }, + { + "code": "300877", + "name": "金春股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "300868", + "name": "杰美特", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300873", + "name": "海晨股份", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "300863", + "name": "卡倍亿", + "tag": "汽车", + "reason": 
"" + }, + { + "code": "300864", + "name": "南大环境", + "tag": "公用", + "reason": "" + }, + { + "code": "300872", + "name": "天阳科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300871", + "name": "回盛生物", + "tag": "农业", + "reason": "" + }, + { + "code": "300862", + "name": "蓝盾光电", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300867", + "name": "圣元环保", + "tag": "公用", + "reason": "" + }, + { + "code": "300861", + "name": "美畅股份", + "tag": "化工", + "reason": "" + }, + { + "code": "605255", + "name": "天普股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "605123", + "name": "派克新材", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688017", + "name": "绿的谐波", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688215", + "name": "瑞晟智能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603155", + "name": "新亚强", + "tag": "化工", + "reason": "" + }, + { + "code": "300879", + "name": "大叶股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300881", + "name": "盛德鑫泰", + "tag": "房地产", + "reason": "" + }, + { + "code": "300880", + "name": "迦南智能", + "tag": "电力", + "reason": "" + }, + { + "code": "688550", + "name": "瑞联新材", + "tag": "半导体", + "reason": "" + }, + { + "code": "688513", + "name": "苑东生物", + "tag": "医药", + "reason": "" + }, + { + "code": "688056", + "name": "莱伯泰科", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688378", + "name": "奥来德", + "tag": "智能机器", + "reason": "" + }, + { + "code": "605006", + "name": "山东玻纤", + "tag": "化工", + "reason": "" + }, + { + "code": "002997", + "name": "瑞鹄模具", + "tag": "汽车", + "reason": "" + }, + { + "code": "605003", + "name": "众望布艺", + "tag": "大消费", + "reason": "" + }, + { + "code": "688095", + "name": "福昕软件", + "tag": "AI", + "reason": "" + }, + { + "code": "300883", + "name": "龙利得", + "tag": "大消费", + "reason": "" + }, + { + "code": "300885", + "name": "海昌新材", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300882", + "name": "万胜智能", + "tag": "电力", + "reason": "" + }, + { + "code": "873169", + "name": "七丰精工", + 
"tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "605009", + "name": "豪悦护理", + "tag": "大消费", + "reason": "" + }, + { + "code": "002984", + "name": "森麒麟", + "tag": "化工", + "reason": "" + }, + { + "code": "605128", + "name": "上海沿浦", + "tag": "汽车", + "reason": "" + }, + { + "code": "300889", + "name": "爱克股份", + "tag": "VR", + "reason": "" + }, + { + "code": "300887", + "name": "谱尼测试", + "tag": "专业服务", + "reason": "" + }, + { + "code": "603112", + "name": "华翔股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "300888", + "name": "稳健医疗", + "tag": "大消费", + "reason": "" + }, + { + "code": "300891", + "name": "惠云钛业", + "tag": "化工", + "reason": "" + }, + { + "code": "605116", + "name": "奥锐特", + "tag": "医药", + "reason": "" + }, + { + "code": "688127", + "name": "蓝特光学", + "tag": "VR", + "reason": "" + }, + { + "code": "003006", + "name": "百亚股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "837174", + "name": "宏裕包材", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "688526", + "name": "科前生物", + "tag": "农业", + "reason": "" + }, + { + "code": "688156", + "name": "路德环境", + "tag": "公用", + "reason": "" + }, + { + "code": "003005", + "name": "竞业达", + "tag": "AI", + "reason": "" + }, + { + "code": "003002", + "name": "壶化股份", + "tag": "化工", + "reason": "" + }, + { + "code": "003008", + "name": "开普检测", + "tag": "专业服务", + "reason": "" + }, + { + "code": "605050", + "name": "福然德", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "300893", + "name": "松原股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "300895", + "name": "铜牛信息", + "tag": "AI", + "reason": "" + }, + { + "code": "603565", + "name": "中谷物流", + "tag": "公用", + "reason": "" + }, + { + "code": "003010", + "name": "若羽臣", + "tag": "AI", + "reason": "" + }, + { + "code": "002998", + "name": "优彩资源", + "tag": "化工", + "reason": "" + }, + { + "code": "688585", + "name": "上纬新材", + "tag": "化工", + "reason": "" + }, + { + "code": "300897", + "name": "山科智能", + "tag": "智能机器", + 
"reason": "" + }, + { + "code": "605018", + "name": "长华集团", + "tag": "汽车", + "reason": "" + }, + { + "code": "688093", + "name": "世华科技", + "tag": "消费电子", + "reason": "" + }, + { + "code": "605099", + "name": "共创草坪", + "tag": "大消费", + "reason": "" + }, + { + "code": "003011", + "name": "海象新材", + "tag": "化工", + "reason": "" + }, + { + "code": "003001", + "name": "中岩大地", + "tag": "房地产", + "reason": "" + }, + { + "code": "688330", + "name": "宏力达", + "tag": "电力", + "reason": "" + }, + { + "code": "688386", + "name": "泛亚微透", + "tag": "化工", + "reason": "" + }, + { + "code": "300899", + "name": "上海凯鑫", + "tag": "公用", + "reason": "" + }, + { + "code": "605336", + "name": "帅丰电器", + "tag": "大消费", + "reason": "" + }, + { + "code": "003012", + "name": "东鹏控股", + "tag": "房地产", + "reason": "" + }, + { + "code": "601568", + "name": "北元集团", + "tag": "化工", + "reason": "" + }, + { + "code": "605058", + "name": "澳弘电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "688788", + "name": "科思科技", + "tag": "AI", + "reason": "" + }, + { + "code": "003013", + "name": "地铁设计", + "tag": "房地产", + "reason": "" + }, + { + "code": "688129", + "name": "东来技术", + "tag": "化工", + "reason": "" + }, + { + "code": "688179", + "name": "阿拉丁", + "tag": "专业服务", + "reason": "" + }, + { + "code": "003016", + "name": "欣贺股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "003017", + "name": "大洋生物", + "tag": "化工", + "reason": "" + }, + { + "code": "836247", + "name": "华密新材", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "689009", + "name": "九号公司-WD", + "tag": "公用", + "reason": "" + }, + { + "code": "300902", + "name": "国安达", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300901", + "name": "中胤时尚", + "tag": "大消费", + "reason": "" + }, + { + "code": "300900", + "name": "广联航空", + "tag": "军工", + "reason": "" + }, + { + "code": "688133", + "name": "泰坦科技", + "tag": "专业服务", + "reason": "" + }, + { + "code": "300905", + "name": "宝丽迪", + "tag": "化工", + "reason": "" + }, + { + "code": 
"300906", + "name": "日月明", + "tag": "公用", + "reason": "" + }, + { + "code": "003018", + "name": "金富科技", + "tag": "化工", + "reason": "" + }, + { + "code": "605007", + "name": "五洲特纸", + "tag": "大消费", + "reason": "" + }, + { + "code": "688057", + "name": "金达莱", + "tag": "公用", + "reason": "" + }, + { + "code": "688160", + "name": "步科股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300884", + "name": "狄耐克", + "tag": "AI", + "reason": "" + }, + { + "code": "873527", + "name": "夜光明", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "688219", + "name": "会通股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300909", + "name": "汇创达", + "tag": "VR", + "reason": "" + }, + { + "code": "300907", + "name": "康平科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "605068", + "name": "明新旭腾", + "tag": "汽车", + "reason": "" + }, + { + "code": "688777", + "name": "中控技术", + "tag": "AI", + "reason": "" + }, + { + "code": "605177", + "name": "东亚药业", + "tag": "医药", + "reason": "" + }, + { + "code": "003004", + "name": "声迅股份", + "tag": "AI", + "reason": "" + }, + { + "code": "300910", + "name": "瑞丰新材", + "tag": "化工", + "reason": "" + }, + { + "code": "605266", + "name": "健之佳", + "tag": "医药", + "reason": "" + }, + { + "code": "605258", + "name": "协和电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300911", + "name": "亿田智能", + "tag": "大消费", + "reason": "" + }, + { + "code": "601686", + "name": "友发集团", + "tag": "房地产", + "reason": "" + }, + { + "code": "688590", + "name": "新致软件", + "tag": "AI", + "reason": "" + }, + { + "code": "605183", + "name": "确成股份", + "tag": "化工", + "reason": "" + }, + { + "code": "300913", + "name": "兆龙互连", + "tag": "AI", + "reason": "" + }, + { + "code": "300912", + "name": "凯龙高科", + "tag": "汽车", + "reason": "" + }, + { + "code": "688308", + "name": "欧科亿", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688571", + "name": "杭华股份", + "tag": "化工", + "reason": "" + }, + { + "code": "003023", + "name": "彩虹集团", + "tag": "大消费", + 
"reason": "" + }, + { + "code": "605151", + "name": "西上海", + "tag": "汽车", + "reason": "" + }, + { + "code": "605299", + "name": "舒华体育", + "tag": "大消费", + "reason": "" + }, + { + "code": "688510", + "name": "航亚科技", + "tag": "军工", + "reason": "" + }, + { + "code": "836270", + "name": "天铭科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "688668", + "name": "鼎通科技", + "tag": "AI", + "reason": "" + }, + { + "code": "605500", + "name": "森林包装", + "tag": "大消费", + "reason": "" + }, + { + "code": "605186", + "name": "健麾信息", + "tag": "医药", + "reason": "" + }, + { + "code": "300921", + "name": "南凌科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300918", + "name": "南山智尚", + "tag": "大消费", + "reason": "" + }, + { + "code": "871981", + "name": "晶赛科技", + "tag": "消费电子", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300923", + "name": "研奥股份", + "tag": "公用", + "reason": "" + }, + { + "code": "601956", + "name": "东贝集团", + "tag": "大消费", + "reason": "" + }, + { + "code": "688679", + "name": "通源环境", + "tag": "公用", + "reason": "" + }, + { + "code": "300922", + "name": "天秦装备", + "tag": "化工", + "reason": "" + }, + { + "code": "300920", + "name": "润阳科技", + "tag": "化工", + "reason": "" + }, + { + "code": "605377", + "name": "华旺科技", + "tag": "大消费", + "reason": "" + }, + { + "code": "688698", + "name": "伟创电气", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688618", + "name": "三旺通信", + "tag": "AI", + "reason": "" + }, + { + "code": "300925", + "name": "法本信息", + "tag": "AI", + "reason": "" + }, + { + "code": "688686", + "name": "奥普特", + "tag": "智能机器", + "reason": "" + }, + { + "code": "605155", + "name": "西大门", + "tag": "房地产", + "reason": "" + }, + { + "code": "300894", + "name": "火星人", + "tag": "大消费", + "reason": "" + }, + { + "code": "873305", + "name": "九菱科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300928", + "name": "华安鑫创", + "tag": "汽车", + "reason": "" + }, + { + "code": "300927", + "name": "江天化学", + "tag": 
"化工", + "reason": "" + }, + { + "code": "300926", + "name": "博俊科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "003033", + "name": "征和工业", + "tag": "汽车", + "reason": "" + }, + { + "code": "605005", + "name": "合兴股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "605228", + "name": "神通科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "300935", + "name": "盈建科", + "tag": "AI", + "reason": "" + }, + { + "code": "300929", + "name": "华骐环保", + "tag": "公用", + "reason": "" + }, + { + "code": "605398", + "name": "新炬网络", + "tag": "AI", + "reason": "" + }, + { + "code": "300931", + "name": "通用电梯", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688680", + "name": "海优新材", + "tag": "赛道", + "reason": "" + }, + { + "code": "300933", + "name": "中辰股份", + "tag": "电力", + "reason": "" + }, + { + "code": "300932", + "name": "三友联众", + "tag": "电力", + "reason": "" + }, + { + "code": "300936", + "name": "中英科技", + "tag": "消费电子", + "reason": "" + }, + { + "code": "870656", + "name": "N海昇", + "tag": "农业", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "300937", + "name": "药易购", + "tag": "医药", + "reason": "" + }, + { + "code": "300938", + "name": "信测标准", + "tag": "专业服务", + "reason": "" + }, + { + "code": "688350", + "name": "富淼科技", + "tag": "化工", + "reason": "" + }, + { + "code": "300939", + "name": "秋田微", + "tag": "VR", + "reason": "" + }, + { + "code": "003036", + "name": "泰坦股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "605055", + "name": "迎丰股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "688628", + "name": "优利德", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300940", + "name": "南极光", + "tag": "VR", + "reason": "" + }, + { + "code": "870204", + "name": "沪江材料", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "003037", + "name": "三和管桩", + "tag": "房地产", + "reason": "" + }, + { + "code": "688059", + "name": "华锐精密", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300946", + "name": "恒而达", + "tag": "智能机器", + "reason": 
"" + }, + { + "code": "605081", + "name": "太和水", + "tag": "公用", + "reason": "" + }, + { + "code": "688665", + "name": "四方光电", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300942", + "name": "易瑞生物", + "tag": "医药", + "reason": "" + }, + { + "code": "688070", + "name": "纵横股份", + "tag": "军工", + "reason": "" + }, + { + "code": "300943", + "name": "春晖智控", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300945", + "name": "曼卡龙", + "tag": "大消费", + "reason": "" + }, + { + "code": "688619", + "name": "罗普特", + "tag": "AI", + "reason": "" + }, + { + "code": "605133", + "name": "嵘泰股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "605268", + "name": "王力安防", + "tag": "房地产", + "reason": "" + }, + { + "code": "688183", + "name": "生益电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300948", + "name": "冠中生态", + "tag": "公用", + "reason": "" + }, + { + "code": "605060", + "name": "联德股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "605298", + "name": "必得科技", + "tag": "公用", + "reason": "" + }, + { + "code": "605303", + "name": "园林股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "871478", + "name": "巨能股份", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "688079", + "name": "美迪凯", + "tag": "VR", + "reason": "" + }, + { + "code": "688696", + "name": "极米科技", + "tag": "大消费", + "reason": "" + }, + { + "code": "300950", + "name": "德固特", + "tag": "智能机器", + "reason": "" + }, + { + "code": "605208", + "name": "永茂泰", + "tag": "汽车", + "reason": "" + }, + { + "code": "003039", + "name": "顺控发展", + "tag": "公用", + "reason": "" + }, + { + "code": "688328", + "name": "深科达", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688676", + "name": "金盘科技", + "tag": "电力", + "reason": "" + }, + { + "code": "605122", + "name": "四方新材", + "tag": "房地产", + "reason": "" + }, + { + "code": "688083", + "name": "中望软件", + "tag": "AI", + "reason": "" + }, + { + "code": "300952", + "name": "恒辉安防", + "tag": "大消费", + "reason": "" + }, + { + "code": "688667", + 
"name": "菱电电控", + "tag": "汽车", + "reason": "" + }, + { + "code": "688316", + "name": "青云科技-U", + "tag": "AI", + "reason": "" + }, + { + "code": "688616", + "name": "西力科技", + "tag": "电力", + "reason": "" + }, + { + "code": "837748", + "name": "路桥信息", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "688092", + "name": "爱科科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300959", + "name": "线上线下", + "tag": "AI", + "reason": "" + }, + { + "code": "003040", + "name": "楚天龙", + "tag": "AI", + "reason": "" + }, + { + "code": "688633", + "name": "星球石墨", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300955", + "name": "嘉亨家化", + "tag": "大消费", + "reason": "" + }, + { + "code": "300957", + "name": "贝泰妮", + "tag": "大消费", + "reason": "" + }, + { + "code": "603759", + "name": "海天股份", + "tag": "公用", + "reason": "" + }, + { + "code": "688195", + "name": "腾景科技", + "tag": "VR", + "reason": "" + }, + { + "code": "300958", + "name": "建工修复", + "tag": "公用", + "reason": "" + }, + { + "code": "300960", + "name": "通业科技", + "tag": "公用", + "reason": "" + }, + { + "code": "688109", + "name": "品茗科技", + "tag": "AI", + "reason": "" + }, + { + "code": "300961", + "name": "深水海纳", + "tag": "公用", + "reason": "" + }, + { + "code": "688659", + "name": "元琛科技", + "tag": "公用", + "reason": "" + }, + { + "code": "688630", + "name": "芯碁微装", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688662", + "name": "富信科技", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300965", + "name": "恒宇信通", + "tag": "军工", + "reason": "" + }, + { + "code": "003041", + "name": "真爱美家", + "tag": "大消费", + "reason": "" + }, + { + "code": "003043", + "name": "华亚智能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603324", + "name": "盛剑环境", + "tag": "公用", + "reason": "" + }, + { + "code": "873167", + "name": "新赣江", + "tag": "医药", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "688191", + "name": "智洋创新", + "tag": "电力", + "reason": "" + }, + { + "code": "688636", + "name": "智明达", + 
"tag": "AI", + "reason": "" + }, + { + "code": "300966", + "name": "共同药业", + "tag": "医药", + "reason": "" + }, + { + "code": "688611", + "name": "杭州柯林", + "tag": "电力", + "reason": "" + }, + { + "code": "300969", + "name": "恒帅股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "300970", + "name": "华绿生物", + "tag": "农业", + "reason": "" + }, + { + "code": "688315", + "name": "诺禾致源", + "tag": "医药", + "reason": "" + }, + { + "code": "688663", + "name": "新风光", + "tag": "电力", + "reason": "" + }, + { + "code": "300967", + "name": "晓鸣股份", + "tag": "农业", + "reason": "" + }, + { + "code": "601279", + "name": "英利汽车", + "tag": "汽车", + "reason": "" + }, + { + "code": "300971", + "name": "博亚精工", + "tag": "智能机器", + "reason": "" + }, + { + "code": "605086", + "name": "龙高股份", + "tag": "公用", + "reason": "" + }, + { + "code": "873223", + "name": "荣亿精密", + "tag": "房地产", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "688533", + "name": "上声电子", + "tag": "汽车", + "reason": "" + }, + { + "code": "300972", + "name": "万辰集团", + "tag": "农业", + "reason": "" + }, + { + "code": "688682", + "name": "霍莱沃", + "tag": "AI", + "reason": "" + }, + { + "code": "300977", + "name": "深圳瑞捷", + "tag": "房地产", + "reason": "" + }, + { + "code": "300983", + "name": "尤安设计", + "tag": "房地产", + "reason": "" + }, + { + "code": "605098", + "name": "行动教育", + "tag": "教育", + "reason": "" + }, + { + "code": "688201", + "name": "信安世纪", + "tag": "AI", + "reason": "" + }, + { + "code": "300975", + "name": "商络电子", + "tag": "大消费", + "reason": "" + }, + { + "code": "605289", + "name": "罗曼股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "300979", + "name": "华利集团", + "tag": "大消费", + "reason": "" + }, + { + "code": "300978", + "name": "东箭科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "300982", + "name": "苏文电能", + "tag": "房地产", + "reason": "" + }, + { + "code": "688323", + "name": "瑞华泰", + "tag": "化工", + "reason": "" + }, + { + "code": "001201", + "name": "东瑞股份", + "tag": "农业", + "reason": "" + }, + { + 
"code": "873576", + "name": "天力复合", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "688395", + "name": "正弦电气", + "tag": "智能机器", + "reason": "" + }, + { + "code": "001202", + "name": "炬申股份", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "688113", + "name": "联测科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "605305", + "name": "中际联合", + "tag": "房地产", + "reason": "" + }, + { + "code": "688655", + "name": "迅捷兴", + "tag": "消费电子", + "reason": "" + }, + { + "code": "605196", + "name": "华通线缆", + "tag": "电力", + "reason": "" + }, + { + "code": "688355", + "name": "明志科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300988", + "name": "津荣天宇", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300990", + "name": "同飞股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688565", + "name": "力源科技", + "tag": "公用", + "reason": "" + }, + { + "code": "605488", + "name": "福莱新材", + "tag": "化工", + "reason": "" + }, + { + "code": "001205", + "name": "盛航股份", + "tag": "公用", + "reason": "" + }, + { + "code": "001206", + "name": "依依股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "688660", + "name": "电气风电", + "tag": "赛道", + "reason": "" + }, + { + "code": "688076", + "name": "诺泰生物", + "tag": "医药", + "reason": "" + }, + { + "code": "688359", + "name": "三孚新科", + "tag": "半导体", + "reason": "" + }, + { + "code": "300993", + "name": "玉马遮阳", + "tag": "房地产", + "reason": "" + }, + { + "code": "300992", + "name": "泰福泵业", + "tag": "智能机器", + "reason": "" + }, + { + "code": "300614", + "name": "百川畅银", + "tag": "公用", + "reason": "" + }, + { + "code": "603836", + "name": "海程邦达", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "300995", + "name": "奇德新材", + "tag": "化工", + "reason": "" + }, + { + "code": "605189", + "name": "富春染织", + "tag": "大消费", + "reason": "" + }, + { + "code": "688538", + "name": "和辉光电-U", + "tag": "VR", + "reason": "" + }, + { + "code": "605296", + "name": "神农集团", + "tag": "农业", + "reason": "" + }, + { + "code": "301001", + 
"name": "凯淳股份", + "tag": "AI", + "reason": "" + }, + { + "code": "603511", + "name": "爱慕股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "301005", + "name": "超捷股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "301003", + "name": "江苏博云", + "tag": "化工", + "reason": "" + }, + { + "code": "688117", + "name": "圣诺生物", + "tag": "医药", + "reason": "" + }, + { + "code": "300996", + "name": "普联软件", + "tag": "AI", + "reason": "" + }, + { + "code": "688625", + "name": "呈和科技", + "tag": "化工", + "reason": "" + }, + { + "code": "605319", + "name": "无锡振华", + "tag": "汽车", + "reason": "" + }, + { + "code": "301002", + "name": "崧盛股份", + "tag": "电力", + "reason": "" + }, + { + "code": "301006", + "name": "迈拓股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688269", + "name": "凯立新材", + "tag": "化工", + "reason": "" + }, + { + "code": "301008", + "name": "宏昌科技", + "tag": "大消费", + "reason": "" + }, + { + "code": "605259", + "name": "绿田机械", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688700", + "name": "东威科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688681", + "name": "科汇股份", + "tag": "电力", + "reason": "" + }, + { + "code": "301007", + "name": "德迈仕", + "tag": "汽车", + "reason": "" + }, + { + "code": "688597", + "name": "煜邦电力", + "tag": "电力", + "reason": "" + }, + { + "code": "301009", + "name": "可靠股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "688517", + "name": "金冠电气", + "tag": "电力", + "reason": "" + }, + { + "code": "301010", + "name": "晶雪节能", + "tag": "房地产", + "reason": "" + }, + { + "code": "300984", + "name": "金沃股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688621", + "name": "阳光诺和", + "tag": "医药", + "reason": "" + }, + { + "code": "688690", + "name": "纳微科技", + "tag": "医药", + "reason": "" + }, + { + "code": "001207", + "name": "联科科技", + "tag": "化工", + "reason": "" + }, + { + "code": "001208", + "name": "华菱线缆", + "tag": "电力", + "reason": "" + }, + { + "code": "301004", + "name": "嘉益股份", + "tag": "大消费", + "reason": "" + }, + { + 
"code": "688367", + "name": "工大高科", + "tag": "公用", + "reason": "" + }, + { + "code": "301019", + "name": "宁波色母", + "tag": "化工", + "reason": "" + }, + { + "code": "301013", + "name": "利和兴", + "tag": "智能机器", + "reason": "" + }, + { + "code": "603171", + "name": "税友股份", + "tag": "AI", + "reason": "" + }, + { + "code": "301016", + "name": "雷尔伟", + "tag": "公用", + "reason": "" + }, + { + "code": "301022", + "name": "海泰科", + "tag": "汽车", + "reason": "" + }, + { + "code": "688239", + "name": "航宇科技", + "tag": "军工", + "reason": "" + }, + { + "code": "301017", + "name": "漱玉平民", + "tag": "医药", + "reason": "" + }, + { + "code": "301021", + "name": "英诺激光", + "tag": "消费电子", + "reason": "" + }, + { + "code": "301020", + "name": "密封科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "605162", + "name": "新中港", + "tag": "公用", + "reason": "" + }, + { + "code": "688226", + "name": "威腾电气", + "tag": "电力", + "reason": "" + }, + { + "code": "301023", + "name": "江南奕帆", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301018", + "name": "申菱环境", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688789", + "name": "宏华数科", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301039", + "name": "中集车辆", + "tag": "汽车", + "reason": "" + }, + { + "code": "688305", + "name": "科德数控", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688087", + "name": "英科再生", + "tag": "化工", + "reason": "" + }, + { + "code": "688038", + "name": "中科通达", + "tag": "AI", + "reason": "" + }, + { + "code": "688793", + "name": "倍轻松", + "tag": "大消费", + "reason": "" + }, + { + "code": "301031", + "name": "中熔电气", + "tag": "电力", + "reason": "" + }, + { + "code": "301027", + "name": "华蓝集团", + "tag": "房地产", + "reason": "" + }, + { + "code": "688501", + "name": "青达环保", + "tag": "公用", + "reason": "" + }, + { + "code": "301026", + "name": "浩通科技", + "tag": "资源", + "reason": "" + }, + { + "code": "301028", + "name": "东亚机械", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688303", + "name": "大全能源", + "tag": "赛道", + "reason": 
"" + }, + { + "code": "688800", + "name": "瑞可达", + "tag": "消费电子", + "reason": "" + }, + { + "code": "301032", + "name": "新柴股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301030", + "name": "仕净科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301029", + "name": "怡合达", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688768", + "name": "容知日新", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301033", + "name": "迈普医学", + "tag": "医药", + "reason": "" + }, + { + "code": "688296", + "name": "和达科技", + "tag": "AI", + "reason": "" + }, + { + "code": "688718", + "name": "唯赛勃", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301024", + "name": "霍普股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "301035", + "name": "润丰股份", + "tag": "农业", + "reason": "" + }, + { + "code": "688071", + "name": "华依科技", + "tag": "汽车", + "reason": "" + }, + { + "code": "001210", + "name": "金房能源", + "tag": "公用", + "reason": "" + }, + { + "code": "301036", + "name": "双乐股份", + "tag": "化工", + "reason": "" + }, + { + "code": "688511", + "name": "天微电子", + "tag": "消费电子", + "reason": "" + }, + { + "code": "301037", + "name": "保立佳", + "tag": "化工", + "reason": "" + }, + { + "code": "688670", + "name": "金迪克", + "tag": "医药", + "reason": "" + }, + { + "code": "605507", + "name": "国邦医药", + "tag": "医药", + "reason": "" + }, + { + "code": "301040", + "name": "中环海陆", + "tag": "房地产", + "reason": "" + }, + { + "code": "301038", + "name": "深水规院", + "tag": "房地产", + "reason": "" + }, + { + "code": "300774", + "name": "倍杰特", + "tag": "公用", + "reason": "" + }, + { + "code": "301042", + "name": "安联锐视", + "tag": "AI", + "reason": "" + }, + { + "code": "001211", + "name": "双枪科技", + "tag": "房地产", + "reason": "" + }, + { + "code": "300964", + "name": "本川智能", + "tag": "消费电子", + "reason": "" + }, + { + "code": "301043", + "name": "绿岛风", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301041", + "name": "金百泽", + "tag": "消费电子", + "reason": "" + }, + { + "code": "300814", + "name": "中富电路", + "tag": 
"消费电子", + "reason": "" + }, + { + "code": "300994", + "name": "久祺股份", + "tag": "公用", + "reason": "" + }, + { + "code": "301045", + "name": "天禄科技", + "tag": "VR", + "reason": "" + }, + { + "code": "300844", + "name": "山水比德", + "tag": "房地产", + "reason": "" + }, + { + "code": "301046", + "name": "能辉科技", + "tag": "赛道", + "reason": "" + }, + { + "code": "301049", + "name": "超越科技", + "tag": "公用", + "reason": "" + }, + { + "code": "301051", + "name": "信濠光电", + "tag": "化工", + "reason": "" + }, + { + "code": "688776", + "name": "国光电气", + "tag": "消费电子", + "reason": "" + }, + { + "code": "301053", + "name": "远信工业", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688121", + "name": "卓然股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301055", + "name": "张小泉", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301056", + "name": "森赫股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "605599", + "name": "菜百股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "688091", + "name": "上海谊众", + "tag": "医药", + "reason": "" + }, + { + "code": "301057", + "name": "汇隆新材", + "tag": "化工", + "reason": "" + }, + { + "code": "688103", + "name": "国力股份", + "tag": "消费电子", + "reason": "" + }, + { + "code": "688622", + "name": "禾信仪器", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301059", + "name": "金三江", + "tag": "化工", + "reason": "" + }, + { + "code": "301061", + "name": "匠心家居", + "tag": "大消费", + "reason": "" + }, + { + "code": "301062", + "name": "上海艾录", + "tag": "化工", + "reason": "" + }, + { + "code": "605033", + "name": "美邦股份", + "tag": "农业", + "reason": "" + }, + { + "code": "688701", + "name": "卓锦股份", + "tag": "公用", + "reason": "" + }, + { + "code": "300854", + "name": "中兰环保", + "tag": "公用", + "reason": "" + }, + { + "code": "605598", + "name": "上海港湾", + "tag": "房地产", + "reason": "" + }, + { + "code": "688697", + "name": "纽威数控", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301066", + "name": "万事利", + "tag": "大消费", + "reason": "" + }, + { + "code": "301070", + 
"name": "开勒股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301071", + "name": "力量钻石", + "tag": "化工", + "reason": "" + }, + { + "code": "301063", + "name": "海锅股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301068", + "name": "大地海洋", + "tag": "公用", + "reason": "" + }, + { + "code": "301072", + "name": "中捷精工", + "tag": "汽车", + "reason": "" + }, + { + "code": "301075", + "name": "多瑞医药", + "tag": "医药", + "reason": "" + }, + { + "code": "301077", + "name": "星华新材", + "tag": "化工", + "reason": "" + }, + { + "code": "301073", + "name": "君亭酒店", + "tag": "大消费", + "reason": "" + }, + { + "code": "871694", + "name": "中裕科技", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "001218", + "name": "丽臣实业", + "tag": "化工", + "reason": "" + }, + { + "code": "688272", + "name": "*ST富吉", + "tag": "VR", + "reason": "" + }, + { + "code": "301080", + "name": "百普赛斯", + "tag": "专业服务", + "reason": "" + }, + { + "code": "301079", + "name": "邵阳液压", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301081", + "name": "严牌股份", + "tag": "公用", + "reason": "" + }, + { + "code": "605555", + "name": "德昌股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "688737", + "name": "中自科技", + "tag": "化工", + "reason": "" + }, + { + "code": "605566", + "name": "福莱蒽特", + "tag": "化工", + "reason": "" + }, + { + "code": "688553", + "name": "汇宇制药-W", + "tag": "医药", + "reason": "" + }, + { + "code": "688211", + "name": "中科微至", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "605138", + "name": "盛泰集团", + "tag": "大消费", + "reason": "" + }, + { + "code": "688280", + "name": "精进电动-UW", + "tag": "汽车", + "reason": "" + }, + { + "code": "301082", + "name": "久盛电气", + "tag": "电力", + "reason": "" + }, + { + "code": "301089", + "name": "拓新药业", + "tag": "医药", + "reason": "" + }, + { + "code": "301088", + "name": "戎美股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "301091", + "name": "深城交", + "tag": "房地产", + "reason": "" + }, + { + "code": "001288", + "name": "运机集团", + "tag": 
"房地产", + "reason": "" + }, + { + "code": "301092", + "name": "争光股份", + "tag": "化工", + "reason": "" + }, + { + "code": "301129", + "name": "瑞纳智能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301169", + "name": "零点有数", + "tag": "专业服务", + "reason": "" + }, + { + "code": "688162", + "name": "巨一科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301128", + "name": "强瑞技术", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301188", + "name": "力诺特玻", + "tag": "化工", + "reason": "" + }, + { + "code": "301098", + "name": "金埔园林", + "tag": "房地产", + "reason": "" + }, + { + "code": "301178", + "name": "天亿马", + "tag": "AI", + "reason": "" + }, + { + "code": "603048", + "name": "浙江黎明", + "tag": "汽车", + "reason": "" + }, + { + "code": "688182", + "name": "灿勤科技", + "tag": "AI", + "reason": "" + }, + { + "code": "688232", + "name": "新点软件", + "tag": "AI", + "reason": "" + }, + { + "code": "001267", + "name": "汇绿生态", + "tag": "房地产", + "reason": "" + }, + { + "code": "301185", + "name": "鸥玛软件", + "tag": "AI", + "reason": "" + }, + { + "code": "603219", + "name": "富佳股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "301119", + "name": "正强股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "301099", + "name": "雅创电子", + "tag": "大消费", + "reason": "" + }, + { + "code": "301155", + "name": "海力风电", + "tag": "赛道", + "reason": "" + }, + { + "code": "301133", + "name": "金钟股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "001317", + "name": "三羊马", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "688112", + "name": "鼎阳科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301198", + "name": "喜悦智行", + "tag": "化工", + "reason": "" + }, + { + "code": "301108", + "name": "洁雅股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "301213", + "name": "观想科技", + "tag": "AI", + "reason": "" + }, + { + "code": "301167", + "name": "建研设计", + "tag": "房地产", + "reason": "" + }, + { + "code": "301126", + "name": "达嘉维康", + "tag": "医药", + "reason": "" + }, + { + "code": "301179", + 
"name": "泽宇智能", + "tag": "AI", + "reason": "" + }, + { + "code": "688192", + "name": "迪哲医药-U", + "tag": "医药", + "reason": "" + }, + { + "code": "688246", + "name": "嘉和美康", + "tag": "医药", + "reason": "" + }, + { + "code": "603216", + "name": "梦天家居", + "tag": "房地产", + "reason": "" + }, + { + "code": "301177", + "name": "迪阿股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "301138", + "name": "华研精机", + "tag": "智能机器", + "reason": "" + }, + { + "code": "873132", + "name": "泰鹏智能", + "tag": "大消费", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603071", + "name": "物产环能", + "tag": "大消费", + "reason": "" + }, + { + "code": "301101", + "name": "明月镜片", + "tag": "大消费", + "reason": "" + }, + { + "code": "301100", + "name": "风光股份", + "tag": "化工", + "reason": "" + }, + { + "code": "301096", + "name": "百诚医药", + "tag": "医药", + "reason": "" + }, + { + "code": "301211", + "name": "亨迪药业", + "tag": "医药", + "reason": "" + }, + { + "code": "301113", + "name": "雅艺科技", + "tag": "房地产", + "reason": "" + }, + { + "code": "870357", + "name": "雅葆轩", + "tag": "消费电子", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "600927", + "name": "永安期货", + "tag": "金融", + "reason": "" + }, + { + "code": "301186", + "name": "超达装备", + "tag": "汽车", + "reason": "" + }, + { + "code": "301190", + "name": "善水科技", + "tag": "化工", + "reason": "" + }, + { + "code": "001296", + "name": "长江材料", + "tag": "化工", + "reason": "" + }, + { + "code": "688210", + "name": "统联精密", + "tag": "消费电子", + "reason": "" + }, + { + "code": "688206", + "name": "概伦电子", + "tag": "AI", + "reason": "" + }, + { + "code": "301189", + "name": "奥尼电子", + "tag": "AI", + "reason": "" + }, + { + "code": "301166", + "name": "优宁维", + "tag": "专业服务", + "reason": "" + }, + { + "code": "301127", + "name": "天源环保", + "tag": "公用", + "reason": "" + }, + { + "code": "688227", + "name": "品高股份", + "tag": "AI", + "reason": "" + }, + { + "code": "603176", + "name": "汇通集团", + "tag": "房地产", + "reason": "" + }, + { + "code": "301159", + "name": 
"三维天地", + "tag": "AI", + "reason": "" + }, + { + "code": "873001", + "name": "纬达光电", + "tag": "VR", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "001234", + "name": "泰慕士", + "tag": "大消费", + "reason": "" + }, + { + "code": "301196", + "name": "唯科科技", + "tag": "化工", + "reason": "" + }, + { + "code": "301136", + "name": "招标股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "301117", + "name": "佳缘科技", + "tag": "AI", + "reason": "" + }, + { + "code": "301158", + "name": "德石股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301201", + "name": "诚达药业", + "tag": "医药", + "reason": "" + }, + { + "code": "603150", + "name": "万朗磁塑", + "tag": "化工", + "reason": "" + }, + { + "code": "688223", + "name": "晶科能源", + "tag": "赛道", + "reason": "" + }, + { + "code": "688171", + "name": "纬德信息", + "tag": "AI", + "reason": "" + }, + { + "code": "301228", + "name": "实朴检测", + "tag": "专业服务", + "reason": "" + }, + { + "code": "301235", + "name": "华康医疗", + "tag": "医药", + "reason": "" + }, + { + "code": "301106", + "name": "骏成科技", + "tag": "VR", + "reason": "" + }, + { + "code": "688225", + "name": "亚信安全", + "tag": "AI", + "reason": "" + }, + { + "code": "688283", + "name": "坤恒顺维", + "tag": "智能机器", + "reason": "" + }, + { + "code": "001313", + "name": "粤海饲料", + "tag": "农业", + "reason": "" + }, + { + "code": "688267", + "name": "中触媒", + "tag": "化工", + "reason": "" + }, + { + "code": "603215", + "name": "比依股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "873593", + "name": "鼎智科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "301181", + "name": "标榜股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "873665", + "name": "科强股份", + "tag": "化工", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "301229", + "name": "纽泰格", + "tag": "汽车", + "reason": "" + }, + { + "code": "301130", + "name": "西点药业", + "tag": "医药", + "reason": "" + }, + { + "code": "301200", + "name": "大族数控", + "tag": "智能机器", + "reason": "" + }, + { + "code": "001266", + 
"name": "宏英智能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688281", + "name": "华秦科技", + "tag": "化工", + "reason": "" + }, + { + "code": "301218", + "name": "华是科技", + "tag": "AI", + "reason": "" + }, + { + "code": "301222", + "name": "浙江恒威", + "tag": "赛道", + "reason": "" + }, + { + "code": "603070", + "name": "万控智造", + "tag": "电力", + "reason": "" + }, + { + "code": "301110", + "name": "青木股份", + "tag": "AI", + "reason": "" + }, + { + "code": "688115", + "name": "思林杰", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301131", + "name": "聚赛龙", + "tag": "化工", + "reason": "" + }, + { + "code": "688175", + "name": "高凌信息", + "tag": "AI", + "reason": "" + }, + { + "code": "603261", + "name": "立航科技", + "tag": "军工", + "reason": "" + }, + { + "code": "688207", + "name": "格灵深瞳", + "tag": "AI", + "reason": "" + }, + { + "code": "688282", + "name": "理工导航", + "tag": "AI", + "reason": "" + }, + { + "code": "688150", + "name": "莱特光电", + "tag": "VR", + "reason": "" + }, + { + "code": "301256", + "name": "华融化学", + "tag": "化工", + "reason": "" + }, + { + "code": "301103", + "name": "何氏眼科", + "tag": "医药", + "reason": "" + }, + { + "code": "688238", + "name": "和元生物", + "tag": "医药", + "reason": "" + }, + { + "code": "301237", + "name": "和顺科技", + "tag": "化工", + "reason": "" + }, + { + "code": "603209", + "name": "兴通股份", + "tag": "公用", + "reason": "" + }, + { + "code": "301226", + "name": "祥明智能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301258", + "name": "富士莱", + "tag": "医药", + "reason": "" + }, + { + "code": "301216", + "name": "万凯新材", + "tag": "化工", + "reason": "" + }, + { + "code": "301263", + "name": "泰恩康", + "tag": "医药", + "reason": "" + }, + { + "code": "301268", + "name": "铭利达", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688337", + "name": "普源精电", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301151", + "name": "冠龙节能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301279", + "name": "金道科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": 
"301109", + "name": "军信股份", + "tag": "公用", + "reason": "" + }, + { + "code": "688125", + "name": "安达智能", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301212", + "name": "联盛化学", + "tag": "化工", + "reason": "" + }, + { + "code": "301163", + "name": "宏德股份", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301120", + "name": "新特电气", + "tag": "电力", + "reason": "" + }, + { + "code": "688326", + "name": "经纬恒润-W", + "tag": "汽车", + "reason": "" + }, + { + "code": "301248", + "name": "杰创智能", + "tag": "AI", + "reason": "" + }, + { + "code": "301148", + "name": "嘉戎技术", + "tag": "公用", + "reason": "" + }, + { + "code": "301288", + "name": "清研环境", + "tag": "公用", + "reason": "" + }, + { + "code": "301259", + "name": "艾布鲁", + "tag": "公用", + "reason": "" + }, + { + "code": "603191", + "name": "望变电气", + "tag": "电力", + "reason": "" + }, + { + "code": "688320", + "name": "禾川科技", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301162", + "name": "国能日新", + "tag": "AI", + "reason": "" + }, + { + "code": "688170", + "name": "德龙激光", + "tag": "智能机器", + "reason": "" + }, + { + "code": "001228", + "name": "永泰运", + "tag": "统一大市场", + "reason": "" + }, + { + "code": "001319", + "name": "铭科精技", + "tag": "汽车", + "reason": "" + }, + { + "code": "301257", + "name": "普蕊斯", + "tag": "专业服务", + "reason": "" + }, + { + "code": "301153", + "name": "中科江南", + "tag": "AI", + "reason": "" + }, + { + "code": "603272", + "name": "联翔股份", + "tag": "房地产", + "reason": "" + }, + { + "code": "301183", + "name": "东田微", + "tag": "VR", + "reason": "" + }, + { + "code": "301107", + "name": "瑜欣电子", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688287", + "name": "观典防务", + "tag": "军工", + "reason": "" + }, + { + "code": "301191", + "name": "菲菱科思", + "tag": "AI", + "reason": "" + }, + { + "code": "873690", + "name": "捷众科技", + "tag": "汽车", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "301160", + "name": "翔楼新材", + "tag": "汽车", + "reason": "" + }, + { + "code": "688251", + "name": "井松智能", + "tag": 
"智能机器", + "reason": "" + }, + { + "code": "838837", + "name": "华原股份", + "tag": "汽车", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "301125", + "name": "腾亚精工", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688348", + "name": "昱能科技", + "tag": "赛道", + "reason": "" + }, + { + "code": "301286", + "name": "侨源股份", + "tag": "化工", + "reason": "" + }, + { + "code": "301156", + "name": "美农生物", + "tag": "农业", + "reason": "" + }, + { + "code": "001226", + "name": "拓山重工", + "tag": "智能机器", + "reason": "" + }, + { + "code": "301220", + "name": "亚香股份", + "tag": "化工", + "reason": "" + }, + { + "code": "688349", + "name": "三一重能", + "tag": "赛道", + "reason": "" + }, + { + "code": "301289", + "name": "国缆检测", + "tag": "专业服务", + "reason": "" + }, + { + "code": "001323", + "name": "慕思股份", + "tag": "大消费", + "reason": "" + }, + { + "code": "001316", + "name": "润贝航科", + "tag": "大消费", + "reason": "" + }, + { + "code": "601089", + "name": "福元医药", + "tag": "医药", + "reason": "" + }, + { + "code": "001268", + "name": "联合精密", + "tag": "智能机器", + "reason": "" + }, + { + "code": "873833", + "name": "美心翼申", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "688237", + "name": "超卓航科", + "tag": "军工", + "reason": "" + }, + { + "code": "301239", + "name": "普瑞眼科", + "tag": "医药", + "reason": "" + }, + { + "code": "301233", + "name": "盛帮股份", + "tag": "化工", + "reason": "" + }, + { + "code": "688400", + "name": "凌云光", + "tag": "智能机器", + "reason": "" + }, + { + "code": "688322", + "name": "奥比中光-UW", + "tag": "消费电子", + "reason": "" + }, + { + "code": "301208", + "name": "中亦科技", + "tag": "AI", + "reason": "" + }, + { + "code": "301139", + "name": "元道通信", + "tag": "AI", + "reason": "" + }, + { + "code": "688053", + "name": "思科瑞", + "tag": "消费电子", + "reason": "" + }, + { + "code": "301312", + "name": "智立方", + "tag": "智能机器", + "reason": "" + }, + { + "code": "001230", + "name": "劲旅环境", + "tag": "公用", + "reason": "" + }, + { + "code": "001336", + "name": "楚环科技", + "tag": 
"智能机器", + "reason": "" + }, + { + "code": "301306", + "name": "西测测试", + "tag": "半导体", + "reason": "" + }, + { + "code": "301269", + "name": "华大九天", + "tag": "AI", + "reason": "" + }, + { + "code": "603201", + "name": "常润股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "301195", + "name": "北路智控", + "tag": "AI", + "reason": "" + }, + { + "code": "688371", + "name": "菲沃泰", + "tag": "消费电子", + "reason": "" + }, + { + "code": "301333", + "name": "诺思格", + "tag": "医药", + "reason": "" + }, + { + "code": "301197", + "name": "工大科雅", + "tag": "AI", + "reason": "" + }, + { + "code": "688205", + "name": "德科立", + "tag": "VR", + "reason": "" + }, + { + "code": "301132", + "name": "满坤科技", + "tag": "消费电子", + "reason": "" + }, + { + "code": "301192", + "name": "泰祥股份", + "tag": "汽车", + "reason": "" + }, + { + "code": "301336", + "name": "趣睡科技", + "tag": "大消费", + "reason": "" + }, + { + "code": "836547", + "name": "无锡晶海", + "tag": "医药", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "873570", + "name": "坤博精工", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "834950", + "name": "迅安科技", + "tag": "消费电子", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "836699", + "name": "海达尔", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "870726", + "name": "鸿智科技", + "tag": "大消费", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "872953", + "name": "国子软件", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "873726", + "name": "卓兆点胶", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "873693", + "name": "阿为特", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "873679", + "name": "前进科技", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "873703", + "name": "广厦环能", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "871263", + "name": "莱赛激光", + "tag": "智能机器", + "reason": "", + "hidden_tag": "北交所" + }, + { + 
"code": "873806", + "name": "云星宇", + "tag": "AI", + "reason": "", + "hidden_tag": "北交所" + }, + { + "code": "603082", + "name": "C北自", + "tag": "智能机器", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "301502", + "name": "N华阳智", + "tag": "智能机器", + "reason": "", + "hidden_tag": "次新股" + }, + { + "code": "688709", + "name": "C华微", + "tag": "半导体", + "reason": "" + }, + { + "code": "688584", + "name": "C合晶", + "tag": "半导体", + "reason": "" + }, + { + "code": "301589", + "name": "C诺瓦", + "tag": "AI", + "reason": "" + } +] \ No newline at end of file diff --git a/examples/tag_utils.py b/examples/tag_utils.py new file mode 100644 index 00000000..e69158e0 --- /dev/null +++ b/examples/tag_utils.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +import json +import os +from collections import Counter + +from zvt.api.utils import china_stock_code_to_id, get_china_exchange +from zvt.domain import BlockStock, Block, Stock, LimitUpInfo + + +def get_limit_up_reasons(entity_id): + info = LimitUpInfo.query_data( + entity_id=entity_id, order=LimitUpInfo.timestamp.desc(), limit=1, return_type="domain" + ) + + topics = [] + if info and info[0].reason: + topics = topics + info[0].reason.split("+") + return topics + + +def get_concept(code): + with open(os.path.join(os.path.dirname(__file__), "concept.json")) as f: + concept_map = json.load(f) + concepts = [item for sublist in concept_map.values() for item in sublist] + df = BlockStock.query_data(filters=[BlockStock.stock_code == code, BlockStock.name.in_(concepts)]) + return df["name"].tolist() + + +def industry_to_tag(industry): + if industry in ["风电设备", "电池", "光伏设备", "能源金属", "电源设备"]: + return "赛道" + if industry in ["半导体", "电子化学品"]: + return "半导体" + if industry in ["医疗服务", "中药", "化学制药", "生物制品", "医药商业"]: + return "医药" + if industry in ["医疗器械"]: + return "医疗器械" + if industry in ["教育"]: + return "教育" + if industry in ["贸易行业", "家用轻工", "造纸印刷", "酿酒行业", "珠宝首饰", "美容护理", "食品饮料", "旅游酒店", "商业百货", "纺织服装", "家电行业"]: + return "大消费" + if industry in 
["小金属", "贵金属", "有色金属", "煤炭行业"]: + return "资源" + if industry in ["消费电子", "电子元件"]: + return "消费电子" + if industry in ["汽车零部件", "汽车服务", "汽车整车"]: + return "汽车" + if industry in ["电机", "通用设备", "专用设备", "仪器仪表"]: + return "智能机器" + if industry in ["电网设备", "电力行业"]: + return "电力" + if industry in ["光学光电子"]: + return "VR" + if industry in ["房地产开发", "房地产服务", "工程建设", "水泥建材", "装修装饰", "装修建材", "工程咨询服务", "钢铁行业", "工程机械"]: + return "房地产" + if industry in ["非金属材料", "包装材料", "化学制品", "化肥行业", "化学原料", "化纤行业", "塑料制品", "玻璃玻纤", "橡胶制品"]: + return "化工" + if industry in ["交运设备", "船舶制造", "航运港口", "公用事业", "燃气", "航空机场", "环保行业", "石油行业", "铁路公路", "采掘行业"]: + return "公用" + if industry in ["证券", "保险", "银行", "多元金融"]: + return "金融" + if industry in ["互联网服务", "软件开发", "计算机设备", "游戏", "通信服务", "通信设备"]: + return "AI" + if industry in ["文化传媒"]: + return "传媒" + if industry in ["农牧饲渔", "农药兽药"]: + return "农业" + if industry in ["物流行业"]: + return "统一大市场" + if industry in ["航天航空", "船舶制造"]: + return "军工" + if industry in ["专业服务"]: + return "专业服务" + + +def build_default_tags(codes, provider="em"): + df_block = Block.query_data(provider=provider, filters=[Block.category == "industry"]) + industry_codes = df_block["code"].tolist() + tags = [] + for code in codes: + block_stocks = BlockStock.query_data( + provider=provider, + filters=[BlockStock.code.in_(industry_codes), BlockStock.stock_code == code], + return_type="domain", + ) + if block_stocks: + block_stock = block_stocks[0] + tags.append( + { + "code": block_stock.stock_code, + "name": block_stock.stock_name, + "tag": industry_to_tag(block_stock.name), + "reason": "", + } + ) + else: + print(f"no industry for {code}") + + return tags + + +def get_main_line_tags(): + with open(os.path.join(os.path.dirname(__file__), "main_line_tags.json")) as f: + return json.load(f) + + +def get_main_line_hidden_tags(): + with open(os.path.join(os.path.dirname(__file__), "main_line_hidden_tags.json")) as f: + return json.load(f) + + +def replace_tags(old_tag="仪器仪表"): + with 
open(os.path.join(os.path.dirname(__file__), "stock_tags.json")) as f: + stock_tags = json.load(f) + for stock_tag in stock_tags: + if stock_tag["tag"] == old_tag: + df = BlockStock.query_data(filters=[BlockStock.stock_code == stock_tag["code"]]) + blocks = df["name"].tolist() + for block in blocks: + tag = industry_to_tag(industry=block) + if tag: + stock_tag["tag"] = tag + break + + with open("result.json", "w") as json_file: + json.dump(stock_tags, json_file, indent=2, ensure_ascii=False) + + +def check_tags(): + with open(os.path.join(os.path.dirname(__file__), "stock_tags.json")) as f: + stock_tags = json.load(f) + tags = set() + hidden_tags = set() + stocks = [] + final_tags = [] + for stock_tag in stock_tags: + stock_code = stock_tag["code"] + if not stock_code.isdigit() or (len(stock_code) != 6): + print(stock_code) + tags.add(stock_tag["tag"]) + hidden_tags.add(stock_tag.get("hidden_tag")) + if stock_code in stocks: + print(stock_tag) + else: + final_tags.append(stock_tag) + stocks.append(stock_code) + + # with open("result.json", "w") as json_file: + # json.dump(final_tags, json_file, indent=2, ensure_ascii=False) + + print(tags) + print(hidden_tags) + print(len(stocks)) + count = Counter(stocks) + duplicates = [item for item, frequency in count.items() if frequency > 1] + print(duplicates) + + +def get_hidden_code(code): + exchange = get_china_exchange(code=code) + if exchange == "bj": + return "北交所" + + +def get_core_tag(codes): + # 从stock_tags.json读取 + other_codes = [] + with open(os.path.join(os.path.dirname(__file__), "stock_tags.json")) as f: + stock_tags = json.load(f) + code_tag_hidden_tag_list = [ + ( + stock_tag["code"], + stock_tag["tag"], + stock_tag.get("hidden_tag") if stock_tag.get("hidden_tag") else get_hidden_code(stock_tag["code"]), + ) + for stock_tag in stock_tags + if stock_tag["code"] in codes + ] + other_codes = [code for code in codes if code not in [item[0] for item in code_tag_hidden_tag_list]] + for code in other_codes: + tags = 
get_limit_up_reasons(entity_id=china_stock_code_to_id(code=code)) + if tags: + code_tag_hidden_tag_list.append((code, tags[0], None)) + else: + code_tag_hidden_tag_list.append((code, "未知", get_hidden_code(code))) + + return code_tag_hidden_tag_list + + +def group_stocks_by_tag(entities, hidden_tags=None): + code_entities_map = {entity.code: entity for entity in entities} + + tag_stocks = {} + code_tag_hidden_tag_list = get_core_tag([entity.code for entity in entities]) + for code, tag, hidden_tag in code_tag_hidden_tag_list: + if hidden_tags and (hidden_tag in hidden_tags): + tag_stocks.setdefault(hidden_tag, []) + tag_stocks.get(hidden_tag).append(code_entities_map.get(code)) + if (tag != hidden_tag) or (not hidden_tags): + tag_stocks.setdefault(tag, []) + tag_stocks.get(tag).append(code_entities_map.get(code)) + + sorted_entities = sorted(tag_stocks.items(), key=lambda x: len(x[1]), reverse=True) + + return sorted_entities + + +def build_stock_tags_by_block(block_name, tag, hidden_tag): + block_stocks = BlockStock.query_data(filters=[BlockStock.name == block_name], return_type="domain") + datas = [ + { + "code": block_stock.stock_code, + "name": block_stock.stock_name, + "tag": tag, + "hidden_tag": hidden_tag, + "reason": "", + } + for block_stock in block_stocks + ] + + # Specify the file path where you want to save the JSON data + file_path = f"{tag}.json" + + # Write JSON data to the file + with open(file_path, "w") as json_file: + json.dump(datas, json_file, indent=2, ensure_ascii=False) + + +def merge_tags(current_tags, added_tags, force_update=False): + code_tags_map = {item["code"]: item for item in current_tags} + + # Merge + for added_tag in added_tags: + code_from_added = added_tag["code"] + if code_from_added not in code_tags_map: + current_tags.append(added_tag) + else: + # update hidden_tag from added_tag + if force_update or (not code_tags_map[code_from_added].get("hidden_tag")): + code_tags_map[code_from_added]["hidden_tag"] = 
added_tag["hidden_tag"] + return current_tags + + +def merge_tags_file(current_tags_file, added_tags_file, result_file, force_update=False): + # current_tags_file读取 + with open(os.path.join(os.path.dirname(__file__), current_tags_file)) as f: + current_tags = json.load(f) + # added_tags_file读取 + with open(os.path.join(os.path.dirname(__file__), added_tags_file)) as f: + added_tags = json.load(f) + + current_tags = merge_tags(current_tags, added_tags, force_update) + with open(result_file, "w") as json_file: + json.dump(current_tags, json_file, indent=2, ensure_ascii=False) + + +def complete_tags(): + with open(os.path.join(os.path.dirname(__file__), "stock_tags.json")) as f: + stock_tags = json.load(f) + current_codes = [stock_tag["code"] for stock_tag in stock_tags] + df = Stock.query_data( + provider="em", + filters=[ + Stock.code.not_in(current_codes), + Stock.name.not_like("%退%"), + ], + ) + + codes = df["code"].tolist() + print(len(codes)) + added_tags = build_default_tags(codes=codes, provider="em") + + with open("result.json", "w") as json_file: + json.dump(stock_tags + added_tags, json_file, indent=2, ensure_ascii=False) + + +def refresh_hidden_tags(): + with open(os.path.join(os.path.dirname(__file__), "stock_tags.json")) as f: + stock_tags = json.load(f) + for stock_tag in stock_tags: + if not stock_tag.get("hidden_tag"): + exchange = get_china_exchange(code=stock_tag["code"]) + if exchange == "bj": + stock_tag["hidden_tag"] = "北交所" + + with open("result.json", "w") as json_file: + json.dump(stock_tags, json_file, indent=2, ensure_ascii=False) + + +if __name__ == "__main__": + # build_stock_tags(block_name="化工原料", tag="化工", hidden_tag=None) + # merge_tags(tags_file="stock_tags.json", hidden_tags_file="化工.json", result_file="result.json", force_update=False) + # replace_tags(old_tag="仪器仪表") + # check_tags() + # complete_tags() + # refresh_hidden_tags() + print(get_concept(code="688787")) diff --git a/examples/trader/dragon_and_tiger_trader.py 
b/examples/trader/dragon_and_tiger_trader.py new file mode 100644 index 00000000..065f1939 --- /dev/null +++ b/examples/trader/dragon_and_tiger_trader.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +from typing import List, Union + +import pandas as pd + +from zvt.contract import IntervalLevel +from zvt.contract.factor import Factor, Transformer, Accumulator +from zvt.domain import Stock, DragonAndTiger +from zvt.trader import StockTrader + + +class DragonTigerFactor(Factor): + def __init__( + self, + provider: str = "em", + entity_provider: str = "em", + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + columns: List = None, + filters: List = [DragonAndTiger.dep1 == "机构专用"], + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: int = None, + transformer: Transformer = None, + accumulator: Accumulator = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = False, + ) -> None: + super().__init__( + DragonAndTiger, + Stock, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + transformer, + accumulator, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + ) + + def compute_result(self): + self.factor_df["filter_result"] = True + super().compute_result() + + +class MyTrader(StockTrader): + def init_factors( + self, entity_ids, entity_schema, exchanges, codes, 
start_timestamp, end_timestamp, adjust_type=None + ): + return [ + DragonTigerFactor( + entity_ids=entity_ids, + exchanges=exchanges, + codes=codes, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + ) + ] + + +if __name__ == "__main__": + trader = MyTrader(start_timestamp="2020-01-01", end_timestamp="2022-05-01") + trader.run() diff --git a/examples/trader/follow_ii_trader.py b/examples/trader/follow_ii_trader.py new file mode 100644 index 00000000..475e6452 --- /dev/null +++ b/examples/trader/follow_ii_trader.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +import pandas as pd + +from zvt.api.utils import get_recent_report_date +from zvt.contract import ActorType, AdjustType +from zvt.domain import StockActorSummary, Stock1dKdata, Stock +from zvt.trader import StockTrader +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import is_same_date, to_pd_timestamp + + +class FollowIITrader(StockTrader): + finish_date = None + + def on_time(self, timestamp: pd.Timestamp): + recent_report_date = to_pd_timestamp(get_recent_report_date(timestamp)) + if self.finish_date and is_same_date(recent_report_date, self.finish_date): + return + filters = [ + StockActorSummary.actor_type == ActorType.raised_fund.value, + StockActorSummary.report_date == recent_report_date, + ] + + if self.entity_ids: + filters = filters + [StockActorSummary.entity_id.in_(self.entity_ids)] + + df = StockActorSummary.query_data(filters=filters) + + if pd_is_not_null(df): + self.logger.info(f"{df}") + self.finish_date = recent_report_date + + long_df = df[df["change_ratio"] > 0.05] + short_df = df[df["change_ratio"] < -0.5] + try: + long_targets = set(long_df["entity_id"].to_list()) + short_targets = set(short_df["entity_id"].to_list()) + if long_targets: + self.buy(timestamp=timestamp, entity_ids=long_targets) + if short_targets: + self.sell(timestamp=timestamp, entity_ids=short_targets) + except Exception as e: + self.logger.error(e) + + +if __name__ == "__main__": 
+ code = "600519" + Stock.record_data(provider="em") + Stock1dKdata.record_data(code=code, provider="em") + StockActorSummary.record_data(code=code, provider="em") + FollowIITrader( + start_timestamp="2002-01-01", + end_timestamp="2021-01-01", + codes=[code], + provider="em", + adjust_type=AdjustType.qfq, + profit_threshold=None, + ).run() diff --git a/examples/trader/keep_run_trader.py b/examples/trader/keep_run_trader.py index 5710ec1d..2e0ab474 100644 --- a/examples/trader/keep_run_trader.py +++ b/examples/trader/keep_run_trader.py @@ -1,74 +1,76 @@ # -*- coding: utf-8 -*- import logging -from zvt.api import get_top_volume_entities from zvt.api.stats import get_top_fund_holding_stocks -from zvt.api.trader_info_api import clear_trader +from zvt.api.stats import get_top_volume_entities from zvt.contract import IntervalLevel -from zvt.factors import TargetSelector, GoldCrossFactor, BullFactor +from zvt.factors.macd.macd_factor import BullFactor from zvt.trader import StockTrader -from zvt.utils.time_utils import split_time_interval, next_date +from zvt.trader.trader_info_api import clear_trader +from zvt.utils.time_utils import split_time_interval, date_time_by_interval logger = logging.getLogger(__name__) class MultipleLevelTrader(StockTrader): - def init_selectors(self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, - adjust_type=None): - start_timestamp = next_date(start_timestamp, -50) + def init_factors( + self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, adjust_type=None + ): + start_timestamp = date_time_by_interval(start_timestamp, -50) - # 周线策略 - week_selector = TargetSelector(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=next_date(start_timestamp, -200), - end_timestamp=end_timestamp, - provider='joinquant', level=IntervalLevel.LEVEL_1WEEK, long_threshold=0.7) - week_bull_factor = BullFactor(entity_ids=entity_ids, 
entity_schema=entity_schema, - exchanges=exchanges, - codes=codes, start_timestamp=next_date(start_timestamp, -200), - end_timestamp=end_timestamp, - provider='joinquant', level=IntervalLevel.LEVEL_1WEEK) - week_selector.add_filter_factor(week_bull_factor) + return [ + BullFactor( + entity_ids=entity_ids, + entity_schema=entity_schema, + exchanges=exchanges, + codes=codes, + start_timestamp=date_time_by_interval(start_timestamp, -200), + end_timestamp=end_timestamp, + provider="joinquant", + level=IntervalLevel.LEVEL_1WEEK, + ), + GoldCrossFactor( + entity_ids=entity_ids, + entity_schema=entity_schema, + exchanges=exchanges, + codes=codes, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + provider="joinquant", + level=IntervalLevel.LEVEL_1DAY, + ), + ] - # 日线策略 - day_selector = TargetSelector(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - provider='joinquant', level=IntervalLevel.LEVEL_1DAY, long_threshold=0.7) - day_gold_cross_factor = GoldCrossFactor(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, - end_timestamp=end_timestamp, - provider='joinquant', level=IntervalLevel.LEVEL_1DAY) - day_selector.add_filter_factor(day_gold_cross_factor) - # 同时使用日线,周线级别 - self.selectors.append(day_selector) - self.selectors.append(week_selector) - - -if __name__ == '__main__': - start = '2019-01-01' - end = '2021-01-01' - trader_name = 'keep_run_trader' +if __name__ == "__main__": + start = "2019-01-01" + end = "2021-01-01" + trader_name = "keep_run_trader" clear_trader(trader_name=trader_name) for time_interval in split_time_interval(start=start, end=end, interval=40): start_timestamp = time_interval[0] end_timestamp = time_interval[-1] # 成交量 - vol_df = get_top_volume_entities(entity_type='stock', - start_timestamp=next_date(start_timestamp, -50), - end_timestamp=start_timestamp, - 
pct=0.3) + vol_df = get_top_volume_entities( + entity_type="stock", + start_timestamp=date_time_by_interval(start_timestamp, -50), + end_timestamp=start_timestamp, + pct=0.3, + ) # 机构重仓 - ii_df = get_top_fund_holding_stocks(timestamp=start_timestamp, pct=0.3, by='trading') + ii_df = get_top_fund_holding_stocks(timestamp=start_timestamp, pct=0.3, by="trading") current_entity_pool = list(set(vol_df.index.tolist()) & set(ii_df.index.tolist())) - logger.info(f'current_entity_pool({len(current_entity_pool)}):{current_entity_pool}') + logger.info(f"current_entity_pool({len(current_entity_pool)}):{current_entity_pool}") - trader = MultipleLevelTrader(start_timestamp=start_timestamp, - end_timestamp=end_timestamp, - entity_ids=current_entity_pool, - trader_name=trader_name, - keep_history=True, - draw_result=False, - rich_mode=False) + trader = MultipleLevelTrader( + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + entity_ids=current_entity_pool, + trader_name=trader_name, + keep_history=True, + draw_result=False, + rich_mode=False, + ) trader.run() diff --git a/examples/trader/ma_trader.py b/examples/trader/ma_trader.py index 561a2c31..a30e1259 100644 --- a/examples/trader/ma_trader.py +++ b/examples/trader/ma_trader.py @@ -1,45 +1,55 @@ # -*- coding: utf-8 -*- from zvt.contract import IntervalLevel -from zvt.factors import CrossMaFactor -from zvt.factors.target_selector import TargetSelector -from zvt.factors.macd import BullFactor +from zvt.factors.ma.ma_factor import CrossMaFactor +from zvt.factors.macd.macd_factor import BullFactor from zvt.trader.trader import StockTrader class MyMaTrader(StockTrader): - def init_selectors(self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, - adjust_type=None): - myselector = TargetSelector(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - provider='joinquant') - - 
myselector.add_filter_factor( - CrossMaFactor(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - windows=[5, 10], need_persist=False)) - - self.selectors.append(myselector) + def init_factors( + self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, adjust_type=None + ): + return [ + CrossMaFactor( + entity_ids=entity_ids, + entity_schema=entity_schema, + exchanges=exchanges, + codes=codes, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + windows=[5, 10], + need_persist=False, + ) + ] class MyBullTrader(StockTrader): - def init_selectors(self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, - adjust_type=None): - myselector = TargetSelector(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - provider='joinquant') - - myselector.add_filter_factor( - BullFactor(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp, adjust_type='hfq')) - - self.selectors.append(myselector) + def init_factors( + self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, adjust_type=None + ): + return [ + BullFactor( + entity_ids=entity_ids, + entity_schema=entity_schema, + exchanges=exchanges, + codes=codes, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + adjust_type="hfq", + ) + ] -if __name__ == '__main__': +if __name__ == "__main__": # single stock with cross ma factor - MyBullTrader(codes=['000338'], level=IntervalLevel.LEVEL_1DAY, start_timestamp='2019-01-01', - end_timestamp='2020-06-30', trader_name='000338_ma_trader').run() + MyBullTrader( + codes=["000338"], + level=IntervalLevel.LEVEL_1DAY, + start_timestamp="2019-01-01", + end_timestamp="2019-06-30", + 
trader_name="000338_ma_trader", + ).run() # single stock with bull factor # MyBullTrader(codes=['000338'], level=IntervalLevel.LEVEL_1DAY, start_timestamp='2018-01-01', diff --git a/examples/trader/macd_day_trader.py b/examples/trader/macd_day_trader.py index 3ca17e94..9e5de408 100644 --- a/examples/trader/macd_day_trader.py +++ b/examples/trader/macd_day_trader.py @@ -4,31 +4,35 @@ import pandas as pd from zvt.contract import IntervalLevel -from zvt.factors import TargetSelector, GoldCrossFactor +from zvt.contract.factor import Factor +from zvt.factors.macd.macd_factor import GoldCrossFactor from zvt.trader import TradingSignal from zvt.trader.trader import StockTrader + # 依赖数据 # data_schema: Stock1dHfqKdata # provider: joinquant -from zvt.utils import next_date +from zvt.utils.time_utils import date_time_by_interval class MacdDayTrader(StockTrader): - - def init_selectors(self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, - adjust_type=None): + def init_factors( + self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, adjust_type=None + ): # 日线策略 - start_timestamp = next_date(start_timestamp, -50) - day_selector = TargetSelector(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - provider='joinquant', level=IntervalLevel.LEVEL_1DAY, long_threshold=0.7) - day_gold_cross_factor = GoldCrossFactor(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, - end_timestamp=end_timestamp, - provider='joinquant', level=IntervalLevel.LEVEL_1DAY) - day_selector.add_filter_factor(day_gold_cross_factor) - - self.selectors.append(day_selector) + start_timestamp = date_time_by_interval(start_timestamp, -50) + return [ + GoldCrossFactor( + entity_ids=entity_ids, + entity_schema=entity_schema, + exchanges=exchanges, + codes=codes, + 
start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + provider="joinquant", + level=IntervalLevel.LEVEL_1DAY, + ) + ] def on_profit_control(self): # 覆盖该函数做止盈 止损 @@ -42,10 +46,6 @@ def on_trading_signals(self, trading_signals: List[TradingSignal]): # 批量处理交易信号,比如连接交易接口,发邮件,微信推送等 super().on_trading_signals(trading_signals) - def on_trading_signal(self, trading_signal: TradingSignal): - # 处理交易信号,比如连接交易接口,发邮件,微信推送等 - super().on_trading_signal(trading_signal) - def on_trading_open(self, timestamp): # 开盘自定义逻辑 super().on_trading_open(timestamp) @@ -70,14 +70,15 @@ def short_position_control(self): # 空头仓位管理 return super().short_position_control() - def on_targets_filtered(self, timestamp, level, selector: TargetSelector, long_targets: List[str], - short_targets: List[str]) -> Tuple[List[str], List[str]]: + def on_factor_targets_filtered( + self, timestamp, level, factor: Factor, long_targets: List[str], short_targets: List[str] + ) -> Tuple[List[str], List[str]]: # 过滤某级别选出的 标的 - return super().on_targets_filtered(timestamp, level, selector, long_targets, short_targets) + return super().on_factor_targets_filtered(timestamp, level, factor, long_targets, short_targets) -if __name__ == '__main__': - trader = MacdDayTrader(start_timestamp='2019-01-01', end_timestamp='2020-01-01') +if __name__ == "__main__": + trader = MacdDayTrader(start_timestamp="2019-01-01", end_timestamp="2020-01-01") trader.run() # f = VolFactor(start_timestamp='2020-01-01', end_timestamp='2020-04-01') # print(f.result_df) diff --git a/examples/trader/macd_week_and_day_trader.py b/examples/trader/macd_week_and_day_trader.py index 6893eda7..3bfb618a 100644 --- a/examples/trader/macd_week_and_day_trader.py +++ b/examples/trader/macd_week_and_day_trader.py @@ -2,7 +2,7 @@ from typing import List, Tuple from zvt.contract import IntervalLevel -from zvt.factors import TargetSelector, GoldCrossFactor +from zvt.factors.macd.macd_factor import GoldCrossFactor from zvt.trader.trader import StockTrader @@ 
-10,38 +10,38 @@ # dataschema: Stock1dHfqKdata Stock1wkHfqKdata # provider: joinquant class MultipleLevelTrader(StockTrader): - def init_selectors(self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, - adjust_type=None): - # 周线策略 - week_selector = TargetSelector(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - provider='joinquant', level=IntervalLevel.LEVEL_1WEEK, long_threshold=0.7) - week_gold_cross_factor = GoldCrossFactor(entity_ids=entity_ids, entity_schema=entity_schema, - exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, - end_timestamp=end_timestamp, - provider='joinquant', level=IntervalLevel.LEVEL_1WEEK) - week_selector.add_filter_factor(week_gold_cross_factor) - - # 日线策略 - day_selector = TargetSelector(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - provider='joinquant', level=IntervalLevel.LEVEL_1DAY, long_threshold=0.7) - day_gold_cross_factor = GoldCrossFactor(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, - end_timestamp=end_timestamp, - provider='joinquant', level=IntervalLevel.LEVEL_1DAY) - day_selector.add_filter_factor(day_gold_cross_factor) - - # 同时使用日线,周线级别 - self.selectors.append(day_selector) - self.selectors.append(week_selector) + def init_factors( + self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, adjust_type=None + ): + # 同时使用周线和日线策略 + return [ + GoldCrossFactor( + entity_ids=entity_ids, + entity_schema=entity_schema, + exchanges=exchanges, + codes=codes, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + provider="joinquant", + level=IntervalLevel.LEVEL_1WEEK, + ), + GoldCrossFactor( + entity_ids=entity_ids, + entity_schema=entity_schema, + exchanges=exchanges, + 
codes=codes, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + provider="joinquant", + level=IntervalLevel.LEVEL_1DAY, + ), + ] def on_targets_selected_from_levels(self, timestamp) -> Tuple[List[str], List[str]]: # 过滤多级别做 多/空 的标的 return super().on_targets_selected_from_levels(timestamp) -if __name__ == '__main__': - trader = MultipleLevelTrader(start_timestamp='2019-01-01', end_timestamp='2020-01-01') +if __name__ == "__main__": + trader = MultipleLevelTrader(start_timestamp="2019-01-01", end_timestamp="2020-01-01") trader.run() diff --git a/examples/utils.py b/examples/utils.py new file mode 100644 index 00000000..6811dd4f --- /dev/null +++ b/examples/utils.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +import json +import logging +import os + +import pandas as pd + +from zvt.domain import StockNews, Stock, LimitUpInfo +from zvt.utils.time_utils import date_time_by_interval, today + +logger = logging.getLogger(__name__) + + +def get_hot_words_config(): + with open(os.path.join(os.path.dirname(__file__), "hot.json")) as f: + return json.load(f) + + +def count_hot_words(text: str): + text = text.upper() + hot_words_config = get_hot_words_config() + word_stats = {} + topic_stats = {} + for topic in hot_words_config: + topic_count = 0 + for word in hot_words_config[topic]: + word_stats[word] = text.lower().count(word) + topic_count = topic_count + word_stats[word] + topic_stats[topic] = topic_count + return topic_stats, word_stats + + +def hot_stats(data: pd.Series): + pass + + +def group_stocks_by_topic( + keyword=None, entities=None, hot_words_config=None, start_timestamp=None, days_ago=60, threshold=3 +): + """ + + :param keyword: + :param entities: + :param hot_words_config: hot words config为二重结构,即 主题:[分支1,分支2,...]的形式 + 比如一个有效的item:{"华为":["华为", "mate pro", "星闪", "问界"]} + :param start_timestamp: + :param days_ago: + :param threshold: + :return: + """ + if not start_timestamp: + start_timestamp = date_time_by_interval(today(), -days_ago) + 
stock_map = {} + + entity_ids = None + if entities: + entity_ids = [entity.entity_id for entity in entities] + else: + entities = Stock.query_data(provider="em", return_type="domain") + + for entity in entities: + stock_map[entity.entity_id] = {"code": entity.code, "name": entity.name} + + filters = None + if keyword: + filters = [StockNews.news_title.contains(keyword)] + df = StockNews.query_data(start_timestamp=start_timestamp, entity_ids=entity_ids, filters=filters) + df = df.groupby("entity_id")["news_title"].apply(",".join).reset_index() + + if not hot_words_config: + hot_words_config = get_hot_words_config() + + hot_stocks_map = {} + topic_count = {} + word_count = {} + for _, row in df[["entity_id", "news_title"]].iterrows(): + entity_id = row["entity_id"] + text = row["news_title"] + + is_hot = False + for topic in hot_words_config: + topic_count.setdefault(topic, 0) + for words in hot_words_config[topic]: + hot_stocks_map.setdefault(words, []) + word_count.setdefault(words, 0) + count = 0 + for word in words.split(","): + count = text.lower().count(word) + count + if count >= threshold: + word_count[words] = word_count[words] + 1 + topic_count[topic] = topic_count[topic] + 1 + hot_stocks_map[words].append( + (f"{stock_map[entity_id]['code']}({stock_map[entity_id]['name']})", count) + ) + is_hot = True + if not is_hot: + hot_stocks_map.setdefault("其他", []) + hot_stocks_map["其他"].append((f"{stock_map[entity_id]['code']}({stock_map[entity_id]['name']})", 0)) + + sorted_topics = sorted(topic_count.items(), key=lambda item: item[1], reverse=True) + sorted_words = sorted(word_count.items(), key=lambda item: item[1], reverse=True) + + result = [] + for topic, count in sorted_topics: + topic_words = hot_words_config[topic] + topic_words_stocks = [ + (f"{words}({count})", sorted(hot_stocks_map[words], key=lambda item: item[1], reverse=True)) + for (words, count) in sorted_words + if words in topic_words + ] + result.append((f"{topic}({count})", topic_words_stocks)) 
+ + result.append(("其他", [("其他", hot_stocks_map.get("其他", ""))])) + + return result + + +def msg_group_stocks_by_topic( + keyword=None, entities=None, hot_words_config=None, start_timestamp=None, days_ago=60, threshold=3 +): + group_info = group_stocks_by_topic( + keyword=keyword, + entities=entities, + hot_words_config=hot_words_config, + start_timestamp=start_timestamp, + days_ago=days_ago, + threshold=threshold, + ) + msg = "" + for group in group_info: + topic = group[0] + msg = msg + f"^^^^^^ {topic} ^^^^^^\n" + for topic_word, stocks_count in group[1]: + msg = msg + f"{topic_word}\n" + stocks = [f"{stock_count[0]} {stock_count[1]}" for stock_count in stocks_count] + msg = msg + "\n".join(stocks) + "\n" + return msg + + +def get_hot_topics(start_timestamp=None, days_ago=20, limit=15): + if not start_timestamp: + start_timestamp = date_time_by_interval(today(), -days_ago) + df = LimitUpInfo.query_data(start_timestamp=start_timestamp, columns=["reason"]) + df["reason"] = df["reason"].str.split("+") + result = df["reason"].tolist() + result = [item for sublist in result for item in sublist] + result = pd.Series(result) + result = result.value_counts() + result = result[:limit].to_dict() + return result + + +if __name__ == "__main__": + # ids = get_top_performance_entities_by_periods(entity_provider="em", data_provider="em") + # + # entities = get_entities(provider="em", entity_type="stock", entity_ids=ids, return_type="domain") + # + # print(msg_group_stocks_by_topic(entities=entities, threshold=1)) + get_hot_topics(days_ago=10) diff --git a/examples/z.sh b/examples/z.sh new file mode 100644 index 00000000..66e557a2 --- /dev/null +++ b/examples/z.sh @@ -0,0 +1,3 @@ +nohup python examples/data_runner/kdata_runner.py >/dev/null 2>&1 & +nohup python examples/reports/report_tops.py >/dev/null 2>&1 & +nohup python examples/reports/report_vol_up.py >/dev/null 2>&1 & \ No newline at end of file diff --git a/init_env.sh b/init_env.sh index 0b697b99..918c072c 100755 --- 
a/init_env.sh +++ b/init_env.sh @@ -24,7 +24,7 @@ if ! which python > /dev/null; then fi pip_opt='' -pip_opt='-i http://pypi.douban.com/simple --trusted-host pypi.douban.com' +#pip_opt='-i https://pypi.tuna.tsinghua.edu.cn/simple --trusted-host pypi.tuna.tsinghua.edu.cn' if ! which virtualenv > /dev/null; then echo -e "virtualenv not found! Install? (y/n) \c" @@ -41,7 +41,7 @@ fi source $BASEDIR/ve/bin/activate cd $BASEDIR -export PYTHONPATH=$PYTHONPATH:. +export PYTHONPATH=$PYTHONPATH:./src if [ ! -f "$BASEDIR/ve/updated" -o $BASEDIR/requirements.txt -nt $BASEDIR/ve/updated ]; then pip install -r $BASEDIR/requirements.txt $pip_opt diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..e34796ec --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,2 @@ +[tool.black] +line-length = 120 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 8e55bafa..c3794bd5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,15 +1,19 @@ -requests == 2.20.1 -SQLAlchemy == 1.3.24 -pandas == 1.1.4 -arrow == 0.11.0 -tzlocal == 1.5.1 -xlrd == 1.2.0 -demjson == 2.2.4 -marshmallow-sqlalchemy == 0.23.1 -marshmallow == 3.2.2 -plotly==4.12.0 -dash==1.17.0 -simplejson==3.16.0 -jqdatapy==0.1.6 -dash-bootstrap-components -dash_daq \ No newline at end of file +requests==2.32.0 +SQLAlchemy==2.0.36 +pandas==2.2.3 +pydantic==2.6.4 +arrow==1.2.3 +openpyxl==3.1.1 +demjson3==3.0.6 +plotly==5.13.0 +dash==2.18.2 +jqdatapy==0.1.8 +dash-bootstrap-components==1.3.1 +dash_daq==0.5.0 +scikit-learn==1.5.2 +fastapi==0.110.0 +fastapi-pagination==0.12.23 +apscheduler==3.10.4 +eastmoneypy==0.1.9 +orjson==3.10.3 +numpy==2.1.3 \ No newline at end of file diff --git a/requirements/dev.txt b/requirements/dev.txt new file mode 100644 index 00000000..96af3f0d --- /dev/null +++ b/requirements/dev.txt @@ -0,0 +1,4 @@ +# dev tools +pytest == 6.2.0 +pre-commit == 2.15.0 +rst2pdf diff --git a/requirements/docs.txt b/requirements/docs.txt new file mode 100644 index 
00000000..481843c0 --- /dev/null +++ b/requirements/docs.txt @@ -0,0 +1,6 @@ +sphinx==8.1.3 +sphinx_autodoc_typehints==3.0.1 +rst2pdf==0.103.1 +ipython==8.29.0 +sphinx_rtd_theme==3.0.2 +readthedocs-sphinx-search==0.3.2 \ No newline at end of file diff --git a/setup.py b/setup.py index 6d62e86d..4b29a93f 100644 --- a/setup.py +++ b/setup.py @@ -7,67 +7,73 @@ # Always prefer setuptools over distutils from setuptools import setup, find_packages -try: - # for pip >= 10 - from pip._internal.req import parse_requirements -except ImportError: - # for pip <= 9.0.3 - from pip.req import parse_requirements - here = path.abspath(path.dirname(__file__)) # Get the long description from the README file -with open(path.join(here, 'README.md'), encoding='utf-8') as f: +with open(path.join(here, "README.md"), encoding="utf-8") as f: long_description = f.read() -# Arguments marked as "Required" below must be included for upload to PyPI. -# Fields marked as "Optional" may be commented out. - -install_reqs = parse_requirements("requirements.txt", session=False) - -try: - requirements = [str(ir.req) for ir in install_reqs] -except: - requirements = [str(ir.requirement) for ir in install_reqs] - setup( - name='zvt', - version='0.9.4', - description='unified,modular quant framework for human beings ', + name="zvt", + version="0.13.1", + description="unified,modular quant framework for human beings ", long_description=long_description, - url='https://github.com/zvtvz/zvt', - author='foolcage', - author_email='5533061@qq.com', + url="https://github.com/zvtvz/zvt", + author="foolcage", + author_email="5533061@qq.com", classifiers=[ # Optional - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: Customer Service', - 'Intended Audience :: Education', - 'Intended Audience :: Financial and Insurance Industry', - 'Topic :: Software Development :: Build Tools', - 'Topic :: Office/Business :: Financial :: Investment', - 'License :: OSI 
Approved :: MIT License', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8' + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Intended Audience :: Customer Service", + "Intended Audience :: Education", + "Intended Audience :: Financial and Insurance Industry", + "Topic :: Software Development :: Build Tools", + "Topic :: Office/Business :: Financial :: Investment", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ], - keywords='quant stock finance fintech big-data zvt technical-analysis trading-platform pandas fundamental-analysis', - packages=find_packages(include=['zvt.*', 'zvt']), - python_requires='>=3.5, <4', + keywords="quant stock finance fintech big-data zvt technical-analysis trading-platform pandas fundamental-analysis", + package_dir={"": "src"}, + packages=find_packages(where="src"), + python_requires=">=3.9, <4", include_package_data=True, - install_requires=requirements, + install_requires=[ + "requests==2.31.0", + "SQLAlchemy==2.0.36", + "pandas==2.0.3", + "pydantic==2.6.4", + "arrow==1.2.3", + "openpyxl==3.1.1", + "demjson3==3.0.6", + "plotly==5.13.0", + "dash==2.18.2", + "jqdatapy==0.1.8", + "dash-bootstrap-components==1.3.1", + "dash_daq==0.5.0", + "scikit-learn==1.5.2", + "fastapi==0.110.0", + "fastapi-pagination==0.12.23", + "apscheduler==3.10.4", + "eastmoneypy==0.1.7", + "orjson==3.10.3", + ], project_urls={ # Optional - 'Bug Reports': 'https://github.com/zvtvz/zvt/issues', - 'Funding': 'https://www.foolcage.com/zvt', - 'Say Thanks!': 'https://saythanks.io/to/foolcage', - 'Source': 'https://github.com/zvtvz/zvt', + "Bug Reports": "https://github.com/zvtvz/zvt/issues", + "Funding": "https://www.foolcage.com/zvt", + "Say Thanks!": 
"https://saythanks.io/to/foolcage", + "Source": "https://github.com/zvtvz/zvt", }, long_description_content_type="text/markdown", entry_points={ - 'console_scripts': [ - 'zvt = zvt.main:main', - 'zvt_plugin = zvt.plugin:main', - 'zvt_export = zvt.plugin:export', + "console_scripts": [ + "zvt = zvt.main:main", + "zvt_server = zvt.zvt_server:main", + "zvt_plugin = zvt.plugin:main", + "zvt_export = zvt.plugin:export", ], }, + license_file="LICENSE", ) diff --git a/sql/reduce_size.sql b/sql/reduce_size.sql new file mode 100644 index 00000000..1c00d7a3 --- /dev/null +++ b/sql/reduce_size.sql @@ -0,0 +1,6 @@ +-- k线数据去除索引,方便传输原始数据,重跑zvt会重建 +-- 再压缩一下,大小为原来的1/10 +drop index stock_1d_hfq_kdata_entity_id_index; +drop index stock_1d_hfq_kdata_code_index; +drop index stock_1d_hfq_kdata_timestamp_index; +VACUUM; \ No newline at end of file diff --git a/src/zvt/__init__.py b/src/zvt/__init__.py new file mode 100644 index 00000000..abd9a08f --- /dev/null +++ b/src/zvt/__init__.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +import importlib +import json +import logging +import os +import pkgutil +import pprint +import shutil +from logging.handlers import RotatingFileHandler +from typing import List + +import pandas as pd +import pkg_resources +from pkg_resources import get_distribution, DistributionNotFound + +from zvt.consts import DATA_SAMPLE_ZIP_PATH, ZVT_TEST_HOME, ZVT_HOME, ZVT_TEST_DATA_PATH, ZVT_TEST_ZIP_DATA_PATH + +try: + dist_name = __name__ + __version__ = get_distribution(dist_name).version +except DistributionNotFound: + __version__ = "unknown" +finally: + del get_distribution, DistributionNotFound + +logger = logging.getLogger(__name__) + + +def init_log(file_name="zvt.log", log_dir=None, simple_formatter=True): + if not log_dir: + log_dir = zvt_env["log_path"] + + root_logger = logging.getLogger() + + # reset the handlers + root_logger.handlers = [] + + root_logger.setLevel(logging.INFO) + + file_name = os.path.join(log_dir, file_name) + + file_log_handler = 
RotatingFileHandler(file_name, maxBytes=524288000, backupCount=10) + + file_log_handler.setLevel(logging.INFO) + + console_log_handler = logging.StreamHandler() + console_log_handler.setLevel(logging.INFO) + + # create formatter and add it to the handlers + if simple_formatter: + formatter = logging.Formatter("%(asctime)s %(levelname)s %(threadName)s %(message)s") + else: + formatter = logging.Formatter( + "%(asctime)s %(levelname)s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(funcName)s %(message)s" + ) + file_log_handler.setFormatter(formatter) + console_log_handler.setFormatter(formatter) + + # add the handlers to the logger + root_logger.addHandler(file_log_handler) + root_logger.addHandler(console_log_handler) + + +os.environ.setdefault("SQLALCHEMY_WARN_20", "1") +pd.set_option("expand_frame_repr", False) +pd.set_option("mode.chained_assignment", "raise") +pd.set_option("display.max_rows", None) +pd.set_option("display.max_columns", None) + +zvt_env = {} + +zvt_config = {} + +_plugins = {} + + +def init_env(zvt_home: str, **kwargs) -> dict: + """ + init env + + :param zvt_home: home path for zvt + """ + data_path = os.path.join(zvt_home, "data") + resource_path = os.path.join(zvt_home, "resources") + tmp_path = os.path.join(zvt_home, "tmp") + if not os.path.exists(data_path): + os.makedirs(data_path) + + if not os.path.exists(resource_path): + os.makedirs(resource_path) + + if not os.path.exists(tmp_path): + os.makedirs(tmp_path) + + zvt_env["zvt_home"] = zvt_home + zvt_env["data_path"] = data_path + zvt_env["resource_path"] = resource_path + zvt_env["tmp_path"] = tmp_path + + # path for storing ui results + zvt_env["ui_path"] = os.path.join(zvt_home, "ui") + if not os.path.exists(zvt_env["ui_path"]): + os.makedirs(zvt_env["ui_path"]) + + # path for storing logs + zvt_env["log_path"] = os.path.join(zvt_home, "logs") + if not os.path.exists(zvt_env["log_path"]): + os.makedirs(zvt_env["log_path"]) + + init_log() + + pprint.pprint(zvt_env) + + 
init_resources(resource_path=resource_path) + # init config + init_config(current_config=zvt_config, **kwargs) + # init plugin + # init_plugins() + + return zvt_env + + +def init_resources(resource_path): + package_name = "zvt" + package_dir = pkg_resources.resource_filename(package_name, "resources") + from zvt.utils.file_utils import list_all_files + + files: List[str] = list_all_files(package_dir, ext=None) + for source_file in files: + dst_file = os.path.join(resource_path, source_file[len(package_dir) + 1 :]) + if not os.path.exists(dst_file): + shutil.copyfile(source_file, dst_file) + + +def init_config(pkg_name: str = None, current_config: dict = None, **kwargs) -> dict: + """ + init config + """ + + # create default config.json if not exist + if pkg_name: + config_file = f"{pkg_name}_config.json" + else: + pkg_name = "zvt" + config_file = "config.json" + + logger.info(f"init config for {pkg_name}, current_config:{current_config}") + + config_path = os.path.join(zvt_env["zvt_home"], config_file) + if not os.path.exists(config_path): + try: + sample_config = pkg_resources.resource_filename(pkg_name, "config.json") + if os.path.exists(sample_config): + shutil.copyfile(sample_config, config_path) + except Exception as e: + logger.warning(f"could not load config.json from package {pkg_name}") + + if os.path.exists(config_path): + with open(config_path) as f: + config_json = json.load(f) + for k in config_json: + current_config[k] = config_json[k] + + # set and save the config + for k in kwargs: + current_config[k] = kwargs[k] + with open(config_path, "w+") as outfile: + json.dump(current_config, outfile) + + pprint.pprint(current_config) + logger.info(f"current_config:{current_config}") + + return current_config + + +def init_plugins(): + for finder, name, ispkg in pkgutil.iter_modules(): + if name.startswith("zvt_"): + try: + _plugins[name] = importlib.import_module(name) + except Exception as e: + logger.warning(f"failed to load plugin {name}", e) + 
logger.info(f"loaded plugins:{_plugins}") + + +def old_db_to_provider_dir(data_path): + files = os.listdir(data_path) + for file in files: + if file.endswith(".db"): + # Split the file name to extract the provider + provider = file.split("_")[0] + + # Define the destination directory + destination_dir = os.path.join(data_path, provider) + + # Create the destination directory if it doesn't exist + if not os.path.exists(destination_dir): + os.makedirs(destination_dir) + + # Define the source and destination paths + source_path = os.path.join(data_path, file) + destination_path = os.path.join(destination_dir, file) + + # Move the file to the destination directory + if not os.path.exists(destination_path): + shutil.move(source_path, destination_path) + logger.info(f"Moved {file} to {destination_dir}") + + +if os.getenv("TESTING_ZVT"): + init_env(zvt_home=ZVT_TEST_HOME) + + # init the sample data if need + same = False + if os.path.exists(ZVT_TEST_ZIP_DATA_PATH): + import filecmp + + same = filecmp.cmp(ZVT_TEST_ZIP_DATA_PATH, DATA_SAMPLE_ZIP_PATH) + + if not same: + from zvt.contract import * + from zvt.utils.zip_utils import unzip + + shutil.copyfile(DATA_SAMPLE_ZIP_PATH, ZVT_TEST_ZIP_DATA_PATH) + unzip(ZVT_TEST_ZIP_DATA_PATH, ZVT_TEST_DATA_PATH) +else: + init_env(zvt_home=ZVT_HOME) + +old_db_to_provider_dir(zvt_env["data_path"]) + +# register to meta +import zvt.contract as zvt_contract +import zvt.recorders as zvt_recorders +import zvt.factors as zvt_factors + +import platform + +if platform.system() == "Windows": + try: + import zvt.recorders.qmt as qmt_recorder + except Exception as e: + logger.error("QMT not work", e) +else: + logger.warning("QMT need run in Windows!") + + +__all__ = ["zvt_env", "zvt_config", "init_log", "init_env", "init_config", "__version__"] diff --git a/src/zvt/api/__init__.py b/src/zvt/api/__init__.py new file mode 100644 index 00000000..2686fff0 --- /dev/null +++ b/src/zvt/api/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*- + + +# the 
__all__ is generated +__all__ = [] diff --git a/src/zvt/api/intent.py b/src/zvt/api/intent.py new file mode 100644 index 00000000..046607af --- /dev/null +++ b/src/zvt/api/intent.py @@ -0,0 +1,206 @@ +# -*- coding: utf-8 -*- +from typing import List + +import pandas as pd + +from zvt.api.kdata import get_kdata_schema +from zvt.contract.api import decode_entity_id +from zvt.contract.drawer import Drawer, ChartType +from zvt.utils.time_utils import to_pd_timestamp + + +def compare( + entity_ids=None, + codes=None, + schema=None, + columns=None, + schema_map_columns: dict = None, + chart_type: ChartType = ChartType.line, + start_timestamp=None, + scale_value: int = None, +): + """ + compare indicators(columns) of entities + + :param entity_ids: + :param codes: + :param schema: + :param columns: + :param schema_map_columns: key represents schema, value represents columns + :param chart_type: "line", "area", "scatter", default "line" + :param start_timestamp: " + :param scale_value: compare with same value which scaled to scale_value + """ + + dfs = [] + # default compare kdata + if schema_map_columns is None and schema is None: + entity_type_map_ids = _group_entity_ids(entity_ids=entity_ids) + for entity_type in entity_type_map_ids: + schema = get_kdata_schema(entity_type=entity_type) + df = schema.query_data(entity_ids=entity_type_map_ids.get(entity_type), start_timestamp=start_timestamp) + dfs.append(df) + all_df = pd.concat(dfs) + drawer = Drawer(main_df=all_df, sub_df_list=[all_df[["entity_id", "timestamp", "turnover"]].copy()]) + drawer.draw_kline(show=True, scale_value=scale_value) + else: + if schema_map_columns: + for schema in schema_map_columns: + columns = ["entity_id", "timestamp"] + schema_map_columns.get(schema) + df = schema.query_data( + entity_ids=entity_ids, codes=codes, columns=columns, start_timestamp=start_timestamp + ) + dfs.append(df) + elif schema: + columns = ["entity_id", "timestamp"] + columns + df = schema.query_data(entity_ids=entity_ids, 
def compare_df(df: pd.DataFrame, chart_type: ChartType = ChartType.line):
    """
    compare indicators(columns) of entities in df

    :param df: normal df with entity_id/timestamp plus the indicator columns
    :param chart_type: "line", "area", "scatter", default "line"
    """
    drawer = Drawer(main_df=df)
    drawer.draw(main_chart=chart_type, show=True)


def distribute(data_schema, columns, entity_ids=None, codes=None, histnorm="percent", nbinsx=20, filters=None):
    """
    distribute indicators(columns) of entities

    :param data_schema: schema to query
    :param columns: indicator columns to plot
    :param entity_ids: optional explicit entity selection
    :param codes: optional code selection
    :param histnorm: "percent", "probability", default "percent"
    :param nbinsx: number of histogram bins
    :param filters: extra query filters
    """
    columns = ["entity_id", "timestamp"] + columns
    df = data_schema.query_data(entity_ids=entity_ids, codes=codes, columns=columns, filters=filters)
    # FIX: the original condition was ``if not entity_ids or codes`` which,
    # by operator precedence, collapsed all rows into one synthetic entity
    # whenever *codes* was given — even though an explicit selection was made.
    # Pool everything under one synthetic id only when no selection exists.
    if not entity_ids and not codes:
        df["entity_id"] = "entity_x_distribute"
    distribute_df(df=df, histnorm=histnorm, nbinsx=nbinsx)


def distribute_df(df, histnorm="percent", nbinsx=20):
    """
    distribute indicators(columns) of entities in df

    :param df: normal df
    :param histnorm: "percent", "probability", default "percent"
    :param nbinsx: number of histogram bins
    """
    drawer = Drawer(main_df=df)
    drawer.draw_histogram(show=True, histnorm=histnorm, nbinsx=nbinsx)


def composite(entity_id, data_schema, columns, filters=None):
    """
    composite indicators(columns) of entity as a pie chart

    :param entity_id: single entity id
    :param data_schema: schema to query
    :param columns: indicator columns forming the pie slices
    :param filters: extra query filters
    """
    columns = ["entity_id", "timestamp"] + columns
    df = data_schema.query_data(entity_id=entity_id, columns=columns, filters=filters)
    composite_df(df=df)


def composite_df(df):
    """
    composite indicators(columns) of entity in df as a pie chart

    :param df: normal df
    """
    drawer = Drawer(main_df=df)
    drawer.draw_pie(show=True)
def _group_entity_ids(entity_ids):
    """Bucket entity ids by their entity type.

    Returns a dict mapping entity_type -> list of the entity_ids of that type,
    preserving the input order within each bucket.
    """
    grouped = {}
    for eid in entity_ids:
        etype, _, _ = decode_entity_id(eid)
        grouped.setdefault(etype, []).append(eid)
    return grouped
def get_trade_dates(start, end=None):
    """Return the list of trading-day timestamps within [start, end].

    Uses the daily kdata of the Shanghai composite index
    (``index_sh_000001``, provider "em") as the trading calendar, so that
    index's data must have been recorded beforehand.

    :param start: start timestamp (inclusive)
    :param end: end timestamp (inclusive); None means "up to the latest data"
    :return: ascending list of timestamps
    """
    df = Index1dKdata.query_data(
        entity_id="index_sh_000001",
        provider="em",
        columns=["timestamp"],
        start_timestamp=start,
        end_timestamp=end,
        order=Index1dKdata.timestamp.asc(),
        return_type="df",
    )
    return df["timestamp"].tolist()


def get_recent_trade_dates(days_count=5):
    """Return the most recent *days_count* trading days.

    Looks back ``days_count + 15`` calendar days so that weekends and
    holidays still leave at least *days_count* trading days in the window.
    # NOTE(review): 15 is a heuristic buffer — presumably sized for CN market
    # holidays; confirm it survives long holiday stretches (e.g. Spring
    # Festival + weekend).
    """
    max_start = date_time_by_interval(current_date(), -days_count - 15)
    dates = get_trade_dates(start=max_start)
    return dates[-days_count:]
def get_kdata(
    entity_id=None,
    entity_ids=None,
    level=IntervalLevel.LEVEL_1DAY.value,
    provider=None,
    columns=None,
    return_type="df",
    start_timestamp=None,
    end_timestamp=None,
    filters=None,
    session=None,
    order=None,
    limit=None,
    index="timestamp",
    drop_index_col=False,
    adjust_type: AdjustType = None,
):
    """Query kdata (OHLCV bars) for one or more entities.

    Thin wrapper: resolves the concrete kdata schema from the first entity's
    type + level + adjust_type, then forwards every remaining argument to
    ``schema.query_data`` unchanged.

    :param entity_id: single entity id; mutually exclusive with *entity_ids*
    :param entity_ids: list of entity ids; all are assumed to share the same
        entity type (only the first id is inspected to pick the schema)
    :param adjust_type: price adjust type; None lets the schema lookup decide
    """
    # at most one of entity_id / entity_ids may be provided
    # NOTE(review): this assert also passes when both are None, which would
    # query with entity_ids=[None] — confirm callers never do that.
    assert not entity_id or not entity_ids
    if entity_ids:
        entity_id = entity_ids[0]
    else:
        entity_ids = [entity_id]

    entity_type, exchange, code = decode_entity_id(entity_id)
    data_schema: Mixin = get_kdata_schema(entity_type, level=level, adjust_type=adjust_type)

    return data_schema.query_data(
        entity_ids=entity_ids,
        level=level,
        provider=provider,
        columns=columns,
        return_type=return_type,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        filters=filters,
        session=session,
        order=order,
        limit=limit,
        index=index,
        drop_index_col=drop_index_col,
    )


def default_adjust_type(entity_type: str) -> AdjustType:
    """Return the default price-adjust type for an entity type.

    Stock-like entities (stock, stockhk, stockus, ...) default to ``hfq``
    (backward adjustment); everything else defaults to ``qfq``.

    :param entity_type: entity type, e.g stock, stockhk, stockus
    """
    if entity_type.lower().startswith("stock"):
        return AdjustType.hfq
    return AdjustType.qfq
def to_high_level_kdata(kdata_df: pd.DataFrame, to_level: IntervalLevel):
    """Resample kdata of a lower interval level into a higher one.

    Currently only daily-or-finer bars -> weekly bars is implemented; any
    other *to_level* returns None (unchanged original contract).

    :param kdata_df: kdata indexed by timestamp, containing at least the
        open/high/low/close/volume/turnover columns plus the bookkeeping
        columns level/entity_id/provider/name/code
    :param to_level: target interval level (only LEVEL_1WEEK is handled)
    :return: resampled DataFrame, or None if *to_level* is not LEVEL_1WEEK
    """

    # window aggregators; .iloc replaces the deprecated positional fallback of
    # s[-1]/s[0] on non-integer indexes (removed in modern pandas)
    def to_close(s):
        if pd_is_not_null(s):
            return s.iloc[-1]

    def to_open(s):
        if pd_is_not_null(s):
            return s.iloc[0]

    def to_high(s):
        return np.max(s)

    def to_low(s):
        return np.min(s)

    def to_sum(s):
        return np.sum(s)

    original_level = kdata_df["level"].iloc[0]
    entity_id = kdata_df["entity_id"].iloc[0]
    provider = kdata_df["provider"].iloc[0]
    name = kdata_df["name"].iloc[0]
    code = kdata_df["code"].iloc[0]

    # only "at most daily source, strictly bigger target" is supported
    assert IntervalLevel(original_level) <= IntervalLevel.LEVEL_1DAY
    assert IntervalLevel(original_level) < IntervalLevel(to_level)

    df: pd.DataFrame = None
    if to_level == IntervalLevel.LEVEL_1WEEK:
        # label weekly bars with Friday: offset -2 days from the default
        # Sunday label (the old loffset='-2' idiom).
        # FIX: the original code had two byte-identical branches for stock
        # vs non-stock entities; collapsed into one (the now-unused
        # decode_entity_id call was removed with it).
        df = kdata_df.resample("W", offset=pd.Timedelta(days=-2)).apply(
            {
                "close": to_close,
                "open": to_open,
                "high": to_high,
                "low": to_low,
                "volume": to_sum,
                "turnover": to_sum,
            }
        )
        df = df.dropna()
        # restore the bookkeeping columns lost by the aggregation
        # (id/entity_id/timestamp/provider/code/name/level)
        df["entity_id"] = entity_id
        df["provider"] = provider
        df["code"] = code
        df["name"] = name

    return df
def portfolio_relate_stock(df, portfolio):
    """Stamp the portfolio's identity columns onto *df*.

    Copies entity_id / entity_type / exchange / code / name from the
    *portfolio* object into same-named columns of *df* (mutating it) and
    returns the same DataFrame for chaining.
    """
    for attr in ("entity_id", "entity_type", "exchange", "code", "name"):
        df[attr] = getattr(portfolio, attr)
    return df
def get_etf_stocks(code=None, codes=None, ids=None, timestamp=None, provider=None):
    """Return the constituent stocks of ETF portfolios as of *timestamp*.

    :param timestamp: as-of date; defaults to "now" resolved **at call time**.
        (FIX: the previous default ``now_pd_timestamp()`` was evaluated once
        at import time and went stale in long-running processes.)
    """
    if timestamp is None:
        timestamp = now_pd_timestamp()
    return get_portfolio_stocks(
        portfolio_entity=Etf,
        code=code,
        codes=codes,
        ids=ids,
        timestamp=timestamp,
        provider=provider,
    )


def get_fund_stocks(code=None, codes=None, ids=None, timestamp=None, provider=None):
    """Return the constituent stocks of fund portfolios as of *timestamp*.

    :param timestamp: as-of date; defaults to "now" resolved **at call time**
        (same import-time-default fix as :func:`get_etf_stocks`).
    """
    if timestamp is None:
        timestamp = now_pd_timestamp()
    return get_portfolio_stocks(
        portfolio_entity=Fund,
        code=code,
        codes=codes,
        ids=ids,
        timestamp=timestamp,
        provider=provider,
    )
def get_limit_up_stocks(timestamp):
    """Return entity ids that hit limit-up on *timestamp* (None if no data)."""
    df = LimitUpInfo.query_data(start_timestamp=timestamp, end_timestamp=timestamp, columns=[LimitUpInfo.entity_id])
    if pd_is_not_null(df):
        return df["entity_id"].tolist()


def get_dragon_and_tigger_player(start_timestamp, end_timestamp=None, direction="in"):
    """Count dragon-and-tiger (龙虎榜) appearances per trading seat.

    :param direction: "in" counts the top-3 buy seats (dep1..dep3),
        "out" counts the top-3 sell seats (dep_1..dep_3)
    :return: list of three DataFrames (one per seat rank), each indexed by
        seat name with the appearance count, sorted descending
    """
    assert direction in ("in", "out")

    filters = None
    if direction == "in":
        filters = [DragonAndTiger.change_pct > 0]
        columns = ["dep1", "dep2", "dep3"]
    elif direction == "out":
        # NOTE(review): this branch filters change_pct > 0 exactly like the
        # "in" branch — looks like a copy-paste slip; confirm whether the
        # sell-side ranking should use change_pct < 0 instead.
        filters = [DragonAndTiger.change_pct > 0]
        columns = ["dep_1", "dep_2", "dep_3"]

    df = DragonAndTiger.query_data(start_timestamp=start_timestamp, end_timestamp=end_timestamp, filters=filters)
    counts = []
    for col in columns:
        # group by seat name; the count of the paired rate column is the
        # number of list appearances for that seat
        counts.append(df[[col, f"{col}_rate"]].groupby(col).count().sort_values(f"{col}_rate", ascending=False))
    return counts


def get_big_players(start_timestamp, end_timestamp=None, count=40):
    """Union of the top-*count* seats from each of the top-3 buy-seat rankings."""
    dep1, dep2, dep3 = get_dragon_and_tigger_player(start_timestamp=start_timestamp, end_timestamp=end_timestamp)
    # top *count* of seat rank 1
    bang1 = dep1.index.tolist()[:count]

    # top *count* of seat rank 2
    bang2 = dep2.index.tolist()[:count]

    # top *count* of seat rank 3
    bang3 = dep3.index.tolist()[:count]

    # de-duplicated union (order not preserved)
    return list(set(bang1 + bang2 + bang3))
buy_rate=5): + filters = [] + if isinstance(players, str): + players = [players] + + if isinstance(players, list): + for player in players: + filters.append( + or_( + and_(DragonAndTiger.dep1 == player, DragonAndTiger.dep1_rate >= buy_rate), + and_(DragonAndTiger.dep2 == player, DragonAndTiger.dep2_rate >= buy_rate), + and_(DragonAndTiger.dep3 == player, DragonAndTiger.dep3_rate >= buy_rate), + and_(DragonAndTiger.dep4 == player, DragonAndTiger.dep4_rate >= buy_rate), + and_(DragonAndTiger.dep5 == player, DragonAndTiger.dep5_rate >= buy_rate), + ) + ) + else: + raise AssertionError("players should be list or str type") + + df = DragonAndTiger.query_data( + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + filters=filters, + index=["entity_id", "timestamp"], + provider=provider, + ) + df = df[~df.index.duplicated(keep="first")] + records = [] + for entity_id, timestamp in df.index: + end_date = date_time_by_interval(timestamp, days + round(days + days * 2 / 5 + 30)) + kdata = Stock1dHfqKdata.query_data( + entity_id=entity_id, + start_timestamp=timestamp, + end_timestamp=end_date, + provider=provider, + index="timestamp", + ) + if len(kdata) <= days: + logger.warning(f"ignore {timestamp} -> end_timestamp: {end_date}") + break + close = kdata["close"] + change_pct = (close[days] - close[0]) / close[0] + records.append({"entity_id": entity_id, "timestamp": timestamp, f"change_pct": change_pct}) + return pd.DataFrame.from_records(records) + + +def get_player_success_rate( + start_timestamp, + end_timestamp=None, + intervals=(3, 5, 10, 60), + players=("机构专用", "东方财富证券股份有限公司拉萨团结路第二证券营业部"), + provider="em", +): + records = [] + for player in players: + record = {"player": player} + for days in intervals: + df = get_player_performance( + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + days=days, + players=player, + provider=provider, + ) + rate = len(df[df["change_pct"] > 0]) / len(df) + record[f"rate_{days}"] = rate + records.append(record) 
def get_players(entity_id, start_timestamp, end_timestamp, provider="em", direction="in", buy_rate=5):
    """Return the seat/rate pairs that traded *entity_id* in the period.

    :param direction: "in" reads buy seats (dep1..dep5), "out" sell seats
        (dep_1..dep_5)
    :return: DataFrame with columns ["player", "buy_rate"], indexed by
        (entity_id, timestamp)
    # TODO(review): *buy_rate* is accepted but never used here — confirm
    # whether filtering by it was intended (get_player_performance does).
    """
    columns = ["entity_id", "timestamp"]
    if direction == "in":
        for i in range(5):
            columns.append(f"dep{i + 1}")
            columns.append(f"dep{i + 1}_rate")
    elif direction == "out":
        for i in range(5):
            columns.append(f"dep_{i + 1}")
            columns.append(f"dep_{i + 1}_rate")

    df = DragonAndTiger.query_data(
        entity_id=entity_id,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        provider=provider,
        columns=columns,
        index=["entity_id", "timestamp"],
    )
    # stack the five seat columns into a single (player, buy_rate) frame
    dfs = []
    if direction == "in":
        for i in range(5):
            p_df = df[[f"dep{i + 1}", f"dep{i + 1}_rate"]].copy()
            p_df.columns = ["player", "buy_rate"]
            dfs.append(p_df)
    elif direction == "out":
        for i in range(5):
            p_df = df[[f"dep_{i + 1}", f"dep_{i + 1}_rate"]].copy()
            p_df.columns = ["player", "buy_rate"]
            dfs.append(p_df)

    player_df = pd.concat(dfs, sort=True)
    return player_df.sort_index(level=[0, 1])


def get_good_players(timestamp=None, recent_days=400, intervals=(3, 5, 10)):
    """Return trading seats with a good post-appearance success rate.

    :param timestamp: as-of date; defaults to "today" **at call time**
        (FIX: the previous default ``current_date()`` was evaluated once at
        import time and went stale in long-running processes)
    :param recent_days: length of the look-back window
    :param intervals: holding periods (days) used for success-rate stats
    # NOTE(review): the threshold filter below hard-codes the rate_3/rate_5/
    # rate_10 columns, so *intervals* must contain 3, 5 and 10 — confirm.
    """
    if timestamp is None:
        timestamp = current_date()
    # leave room after the window so the longest interval can be evaluated
    end_timestamp = date_time_by_interval(timestamp, -intervals[-1] - 30)
    # recent year
    start_timestamp = date_time_by_interval(end_timestamp, -recent_days)
    # FIX: leftover debug print replaced with logging
    logger.info(f"{start_timestamp} to {end_timestamp}")
    # 最近一年牛x的营业部 (strong seats of the recent year)
    players = get_big_players(start_timestamp=start_timestamp, end_timestamp=end_timestamp)
    logger.info(players)
    df = get_player_success_rate(
        start_timestamp=start_timestamp, end_timestamp=end_timestamp, intervals=intervals, players=players
    )
    good_players = df[(df["rate_3"] > 0.4) & (df["rate_5"] > 0.3) & (df["rate_10"] > 0.3)].index.tolist()
    return good_players
# Cap-band convenience wrappers around get_entity_list_by_cap.
# Bands (CNY): BIG >= 50e9, MIDDLE = [15e9, 50e9], SMALL = [4e9, 15e9],
# MINI <= 4e9.
# NOTE(review): get_entity_list_by_cap uses inclusive bounds on both ends,
# so adjacent bands share their boundary values — confirm that is intended.


def get_big_cap_stock(timestamp, provider="em"):
    """Stocks with market cap >= BIG_CAP (50 billion CNY) on *timestamp*."""
    return get_entity_list_by_cap(
        timestamp=timestamp, cap_start=BIG_CAP, cap_end=None, entity_type="stock", provider=provider
    )


def get_middle_cap_stock(timestamp, provider="em"):
    """Stocks with market cap in [MIDDLE_CAP, BIG_CAP] on *timestamp*."""
    return get_entity_list_by_cap(
        timestamp=timestamp, cap_start=MIDDLE_CAP, cap_end=BIG_CAP, entity_type="stock", provider=provider
    )


def get_small_cap_stock(timestamp, provider="em"):
    """Stocks with market cap in [SMALL_CAP, MIDDLE_CAP] on *timestamp*."""
    return get_entity_list_by_cap(
        timestamp=timestamp, cap_start=SMALL_CAP, cap_end=MIDDLE_CAP, entity_type="stock", provider=provider
    )


def get_mini_cap_stock(timestamp, provider="em"):
    """Stocks with market cap <= SMALL_CAP (4 billion CNY) on *timestamp*."""
    return get_entity_list_by_cap(
        timestamp=timestamp, cap_start=None, cap_end=SMALL_CAP, entity_type="stock", provider=provider
    )


def get_mini_and_small_stock(timestamp, provider="em"):
    """Stocks with market cap <= MIDDLE_CAP (mini + small bands) on *timestamp*."""
    return get_entity_list_by_cap(
        timestamp=timestamp, cap_start=None, cap_end=MIDDLE_CAP, entity_type="stock", provider=provider
    )


def get_middle_and_big_stock(timestamp, provider="em"):
    """Stocks with market cap >= MIDDLE_CAP (middle + big bands) on *timestamp*."""
    return get_entity_list_by_cap(
        timestamp=timestamp, cap_start=MIDDLE_CAP, cap_end=None, entity_type="stock", provider=provider
    )
def get_top_up_today(n=100):
    """Top-*n* gainers right now by change_pct (realtime StockQuote); None if no data."""
    df = StockQuote.query_data(columns=[StockQuote.entity_id], order=StockQuote.change_pct.desc(), limit=n)
    if pd_is_not_null(df):
        return df["entity_id"].to_list()


def get_shoot_today(up_change_pct=0.03, down_change_pct=-0.03, interval=2):
    """Detect stocks that moved sharply within the last *interval* minutes.

    Computes each stock's price change over the most recent *interval*
    minutes of quote logs and splits them by the given thresholds.

    :param up_change_pct: minimum change to count as shooting up
    :param down_change_pct: maximum (negative) change to count as shooting down
    :param interval: look-back window in minutes
    :return: (up_entity_ids, down_entity_ids), or None when there is no
        quote-log data in the window
    """
    current_time = now_timestamp()
    latest = StockQuoteLog.query_data(
        columns=[StockQuoteLog.time], return_type="df", limit=1, order=StockQuoteLog.time.desc()
    )
    # epoch-millis of the newest quote log; FIX: .iloc replaces the
    # deprecated positional fallback of latest["time"][0], and the leftover
    # debug print()s below are now logger.debug calls
    latest_time = int(latest["time"].iloc[0])
    logger.debug("latest quote time: %s", latest_time)

    delay = (current_time - latest_time) / (60 * 1000)
    if delay > 2:
        logger.warning(f"delay {delay} minutes")

    # interval minutes
    start_time = latest_time - (interval * 60 * 1000)
    filters = [StockQuoteLog.time > start_time]
    df = StockQuoteLog.query_data(
        filters=filters, columns=[StockQuoteLog.entity_id, StockQuoteLog.time, StockQuoteLog.price], return_type="df"
    )
    if pd_is_not_null(df):
        df.sort_values(by=["entity_id", "time"], inplace=True)

        # per-entity change over the window: (last - first) / first
        g_df = df.groupby("entity_id").agg(
            first_price=("price", "first"),
            last_price=("price", "last"),
            last_time=("time", "last"),
            change_pct=("price", lambda x: (x.iloc[-1] - x.iloc[0]) / x.iloc[0]),
        )
        logger.debug("window stats:\n%s", g_df.sort_values(by=["change_pct"]))
        up = g_df[g_df["change_pct"] > up_change_pct]
        down = g_df[g_df["change_pct"] < down_change_pct]
        return up.index.tolist(), down.index.tolist()
def get_top_down_today(n=100):
    """Top-*n* losers right now by change_pct (realtime StockQuote); None if no data."""
    df = StockQuote.query_data(columns=[StockQuote.entity_id], order=StockQuote.change_pct.asc(), limit=n)
    if pd_is_not_null(df):
        return df["entity_id"].to_list()


def get_limit_down_today():
    """Entity ids currently at limit-down (realtime StockQuote); None if no data."""
    df = StockQuote.query_data(filters=[StockQuote.is_limit_down], columns=[StockQuote.entity_id])
    if pd_is_not_null(df):
        return df["entity_id"].to_list()


def get_high_days_count(entity_ids=None, target_date=None, days=10):
    """Map entity_id -> its latest ``high_days`` label within the last *days* days.

    :param entity_ids: optional entity selection; None means all
    :param target_date: as-of date; defaults to "today" **at call time**
        (FIX: the previous default ``current_date()`` was evaluated once at
        import time and went stale in long-running processes)
    :param days: look-back window in calendar days
    :return: dict of entity_id -> high_days value of the most recent record
    """
    if target_date is None:
        target_date = current_date()
    recent_days = date_time_by_interval(target_date, -days)
    df = LimitUpInfo.query_data(
        entity_ids=entity_ids,
        start_timestamp=recent_days,
        columns=[LimitUpInfo.timestamp, LimitUpInfo.entity_id, LimitUpInfo.high_days, LimitUpInfo.high_days_count],
    )
    # keep only the newest record per entity
    df_sorted = df.sort_values(by=["entity_id", "timestamp"])
    df_latest = df_sorted.drop_duplicates(subset="entity_id", keep="last").reset_index(drop=True)

    entity_id_to_high_days_map = df_latest.set_index("entity_id")["high_days"].to_dict()
    return entity_id_to_high_days_map
class WindowMethod(enum.Enum):
    # how a column is folded over the scoring window
    change = "change"
    avg = "avg"
    sum = "sum"


class TopType(enum.Enum):
    # which end of the ranking to return
    positive = "positive"
    negative = "negative"


def get_top_performance_by_month(
    entity_type="stock",
    start_timestamp="2015-01-01",
    end_timestamp=None,
    list_days=None,
    data_provider=None,
):
    """Yield the month-by-month top performers between the two timestamps.

    :param end_timestamp: end of the range; defaults to "now" resolved at
        call time (FIX: the previous default ``now_pd_timestamp()`` was
        evaluated once at import time and went stale in long-running
        processes)
    :yield: (month_start, month_end, top_df) per calendar month
    """
    if end_timestamp is None:
        end_timestamp = now_pd_timestamp()
    ranges = month_start_end_ranges(start_date=start_timestamp, end_date=end_timestamp)

    for month_range in ranges:
        start_timestamp = month_range[0]
        end_timestamp = month_range[1]
        top, _ = get_top_performance_entities(
            entity_type=entity_type,
            start_timestamp=start_timestamp,
            end_timestamp=end_timestamp,
            list_days=list_days,
            data_provider=data_provider,
        )

        yield start_timestamp, end_timestamp, top
= default_adjust_type(entity_type=entity_type) + kdata_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type) + entity_schema = get_entity_schema(entity_type=entity_type) + + if not target_date: + target_date = get_latest_kdata_date(provider=data_provider, entity_type=entity_type, adjust_type=adjust_type) + + filter_entity_ids = get_entity_ids_by_filter( + provider=entity_provider, + ignore_st=ignore_st, + ignore_new_stock=ignore_new_stock, + entity_schema=entity_schema, + target_date=target_date, + entity_ids=entity_ids, + ) + + if not filter_entity_ids: + return [] + + filter_turnover_df = kdata_schema.query_data( + filters=[ + kdata_schema.turnover >= turnover_threshold, + kdata_schema.turnover_rate >= turnover_rate_threshold, + ], + provider=data_provider, + start_timestamp=date_time_by_interval(target_date, -7), + end_timestamp=target_date, + index="entity_id", + columns=["entity_id", "code"], + ) + if filter_entity_ids: + filter_entity_ids = set(filter_entity_ids) & set(filter_turnover_df.index.tolist()) + else: + filter_entity_ids = filter_turnover_df.index.tolist() + + if not filter_entity_ids: + return [] + + logger.info(f"{entity_type} filter_entity_ids size: {len(filter_entity_ids)}") + filters = [kdata_schema.entity_id.in_(filter_entity_ids)] + selected = [] + current_start = None + real_period = 1 + for i, period in enumerate(periods): + real_period = max(real_period, period) + while True: + start = date_time_by_interval(target_date, -real_period) + trade_days = get_trade_dates(start=start, end=target_date) + if not trade_days: + logger.info(f"no trade days in: {start} to {target_date}") + real_period = real_period + 1 + continue + if current_start and is_same_date(current_start, trade_days[0]): + logger.info("ignore same trade days") + real_period = real_period + 1 + continue + break + current_start = trade_days[0] + current_end = trade_days[-1] + + logger.info(f"trade days in: {current_start} to {current_end}, real_period: 
def get_top_performance_entities(
    entity_type="stock",
    start_timestamp=None,
    end_timestamp=None,
    pct=0.1,
    return_type=None,
    adjust_type: Union[AdjustType, str] = None,
    entity_filters=None,
    kdata_filters=None,
    show_name=False,
    list_days=None,
    entity_provider=None,
    data_provider=None,
):
    """Rank entities by close-price change over [start_timestamp, end_timestamp].

    Resolves the kdata schema for *entity_type*, narrows the entity universe
    by *entity_filters* (optionally requiring at least *list_days* days since
    listing), then delegates the actual scoring to ``get_top_entities`` with
    ``column="close"`` and ``WindowMethod.change``.

    :param pct: fraction of the ranking to keep (0.1 = top/bottom 10%)
    :param return_type: TopType selector forwarded to get_top_entities
    :param list_days: minimum days listed before *start_timestamp*
    :return: whatever get_top_entities returns (per the callers in this
        module: a (positive_df, negative_df) pair), or (None, None) when no
        entity passes the filters
    """
    if not adjust_type:
        adjust_type = default_adjust_type(entity_type=entity_type)
    data_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)

    if not entity_filters:
        entity_filters = []
    if list_days:
        entity_schema = get_entity_schema(entity_type=entity_type)
        # require the entity to have been listed at least list_days before
        # the scoring window starts
        list_date = date_time_by_interval(start_timestamp, -list_days)
        entity_filters += [entity_schema.list_date <= list_date]

    filter_entities = get_entity_ids(
        provider=entity_provider,
        entity_type=entity_type,
        filters=entity_filters,
    )
    if not filter_entities:
        logger.warning(f"no entities selected")
        return None, None

    if not kdata_filters:
        kdata_filters = []
    kdata_filters = kdata_filters + [data_schema.entity_id.in_(filter_entities)]

    return get_top_entities(
        data_schema=data_schema,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        column="close",
        pct=pct,
        method=WindowMethod.change,
        return_type=return_type,
        kdata_filters=kdata_filters,
        show_name=show_name,
        data_provider=data_provider,
    )
def get_top_fund_holding_stocks(timestamp=None, pct=0.3, by=None):
    """
    Return the top ``pct`` stocks most heavily held by funds at ``timestamp``.

    :param timestamp: reference time; defaults to now
    :param pct: fraction (0, 1] of the ranking to return
    :param by: None ranks by absolute held market cap; "trading" ranks by share
        of circulating market cap; "all" by share of total market cap
    :return: single-column DataFrame indexed by stock/entity id
    :raises ValueError: if ``by`` is truthy but not "trading" or "all"
    """
    if not timestamp:
        timestamp = now_pd_timestamp()
    # Quarterly reports are usually published within 1 month after report_date,
    #半年报 within 2 months and annual reports within 4 months, so we look back
    # one publication point to be sure data exists. The result therefore lags —
    # it only gives a rough picture ("vaguely right" beats "precisely wrong").
    report_date = get_recent_report_date(timestamp, 1)

    # Fail fast on an unknown ranking mode; previously an unexpected `by`
    # value left `columns` unbound and raised NameError much later.
    if by and by not in ("trading", "all"):
        raise ValueError(f"unsupported by: {by}")

    fund_cap_df = FundStock.query_data(
        filters=[
            FundStock.report_date >= report_date,
            FundStock.timestamp <= timestamp,
        ],
        columns=["stock_id", "market_cap"],
    )
    fund_cap_df = fund_cap_df.groupby("stock_id")["market_cap"].sum().sort_values(ascending=False)

    # rank directly by held market cap
    if not by:
        s = fund_cap_df.iloc[: int(len(fund_cap_df) * pct)]
        return s.to_frame()

    # rank by proportion of circulating market cap
    if by == "trading":
        columns = ["entity_id", "circulating_market_cap"]
    # rank by proportion of total market cap
    else:
        columns = ["entity_id", "market_cap"]

    entity_ids = fund_cap_df.index.tolist()
    start_timestamp = date_time_by_interval(timestamp, -30)
    cap_df = StockValuation.query_data(
        entity_ids=entity_ids,
        filters=[
            StockValuation.timestamp >= start_timestamp,
            StockValuation.timestamp <= timestamp,
        ],
        columns=columns,
    )
    if by == "trading":
        cap_df = cap_df.rename(columns={"circulating_market_cap": "cap"})
    else:
        cap_df = cap_df.rename(columns={"market_cap": "cap"})

    # average valuation over the last 30 days per entity
    cap_df = cap_df.groupby("entity_id").mean()
    result_df = pd.concat([cap_df, fund_cap_df], axis=1, join="inner")
    result_df["pct"] = result_df["market_cap"] / result_df["cap"]

    pct_df = result_df["pct"].sort_values(ascending=False)
    s = pct_df.iloc[: int(len(pct_df) * pct)]
    return s.to_frame()
def get_performance_stats_by_month(
    entity_type="stock",
    start_timestamp="2015-01-01",
    end_timestamp=None,
    adjust_type: Union[AdjustType, str] = None,
    data_provider=None,
):
    """
    Build a per-calendar-month table of performance-change histograms.

    :param entity_type: entity type to analyse
    :param start_timestamp: first month to include
    :param end_timestamp: last month to include; defaults to "now", resolved
        per call (the old default ``now_pd_timestamp()`` was evaluated only
        once, at import time)
    :param adjust_type: kdata adjust type; resolved from entity_type when falsy
    :param data_provider: provider for the kdata query
    :return: DataFrame indexed by month-start date, one column per change bucket
    """
    if end_timestamp is None:
        end_timestamp = now_pd_timestamp()
    ranges = month_start_end_ranges(start_date=start_timestamp, end_date=end_timestamp)

    month_stats = {}
    for month_start, month_end in ranges:
        logger.info(f"calculate range [{month_start}, {month_end}]")
        stats = get_performance_stats(
            entity_type=entity_type,
            start_timestamp=month_start,
            end_timestamp=month_end,
            adjust_type=adjust_type,
            data_provider=data_provider,
        )
        if stats:
            month_stats[to_time_str(month_start)] = stats

    return pd.DataFrame.from_dict(data=month_stats, orient="index")
def _query_top_kdata_entities(
    entity_type,
    entity_ids,
    start_timestamp,
    end_timestamp,
    pct,
    return_type,
    adjust_type,
    method,
    data_provider,
    threshold,
    column,
):
    # Shared implementation for the turnover / turnover_rate rankings below,
    # which were previously two near-identical copies.
    if not adjust_type:
        adjust_type = default_adjust_type(entity_type=entity_type)
    data_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)

    filters = []
    if entity_ids:
        filters.append(data_schema.entity_id.in_(entity_ids))
    if threshold:
        # minimum value of the ranked column
        filters.append(getattr(data_schema, column) >= threshold)

    result, _ = get_top_entities(
        data_schema=data_schema,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        column=column,
        pct=pct,
        method=method,
        return_type=return_type,
        kdata_filters=filters,
        data_provider=data_provider,
    )
    return result


def get_top_volume_entities(
    entity_type="stock",
    entity_ids=None,
    start_timestamp=None,
    end_timestamp=None,
    pct=0.1,
    return_type=TopType.positive,
    adjust_type: Union[AdjustType, str] = None,
    method=WindowMethod.avg,
    data_provider=None,
    threshold=None,
):
    """
    Rank entities by turnover (aggregated with ``method``) over the window.

    :param threshold: optional minimum turnover filter
    :return: DataFrame of the selected ranking (positive or negative side)
    """
    return _query_top_kdata_entities(
        entity_type=entity_type,
        entity_ids=entity_ids,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        pct=pct,
        return_type=return_type,
        adjust_type=adjust_type,
        method=method,
        data_provider=data_provider,
        threshold=threshold,
        column="turnover",
    )


def get_top_turnover_rate_entities(
    entity_type="stock",
    entity_ids=None,
    start_timestamp=None,
    end_timestamp=None,
    pct=0.1,
    return_type=TopType.positive,
    adjust_type: Union[AdjustType, str] = None,
    method=WindowMethod.avg,
    data_provider=None,
    threshold=None,
):
    """
    Rank entities by turnover_rate (aggregated with ``method``) over the window.

    :param threshold: optional minimum turnover_rate filter
    :return: DataFrame of the selected ranking (positive or negative side)
    """
    return _query_top_kdata_entities(
        entity_type=entity_type,
        entity_ids=entity_ids,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        pct=pct,
        return_type=return_type,
        adjust_type=adjust_type,
        method=method,
        data_provider=data_provider,
        threshold=threshold,
        column="turnover_rate",
    )
def get_top_entities(
    data_schema: Mixin,
    column: str,
    start_timestamp=None,
    end_timestamp=None,
    pct=0.1,
    method: WindowMethod = WindowMethod.change,
    return_type: TopType = None,
    kdata_filters=None,
    show_name=False,
    data_provider=None,
):
    """
    Get top entities in a specific domain between a time range.

    :param data_schema: schema in domain
    :param column: schema column to rank on
    :param start_timestamp: window start
    :param end_timestamp: window end
    :param pct: fraction (0, 1] of ranked entities to keep on each side
    :param method: how to aggregate ``column`` over the window
    :param return_type: which side to compute; None computes both
    :param kdata_filters: extra filters for the kdata query
    :param show_name: add an entity-name column to the results
    :param data_provider: provider for the kdata query
    :return: (positive_df, negative_df); either may be None
    """
    # accept str values for the enums (idiomatic isinstance, not type ==)
    if isinstance(method, str):
        method = WindowMethod(method)
    if isinstance(return_type, str):
        return_type = TopType(return_type)

    columns = ["entity_id", column, "name"] if show_name else ["entity_id", column]

    all_df = data_schema.query_data(
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        columns=columns,
        filters=kdata_filters,
        provider=data_provider,
    )
    if not pd_is_not_null(all_df):
        return None, None

    tops = {}
    names = {}
    for entity_id, df in all_df.groupby("entity_id"):
        if method == WindowMethod.change:
            start = df[column].iloc[0]
            end = df[column].iloc[-1]
            # guard against division by zero when the window starts at 0
            tops[entity_id] = (end - start) / abs(start) if start != 0 else 0
        elif method == WindowMethod.avg:
            tops[entity_id] = df[column].mean()
        elif method == WindowMethod.sum:
            tops[entity_id] = df[column].sum()

        if show_name:
            names[entity_id] = df["name"].iloc[0]

    positive_df = None
    negative_df = None
    top_index = int(len(tops) * pct)
    col = "score"
    # NOTE: the original called sort_values() and discarded the result (a
    # no-op); the dicts below are built already sorted, so the frames keep
    # that order without an extra sort.
    if return_type is None or return_type == TopType.positive:
        # from big to small
        ranked = sorted(tops.items(), key=lambda item: item[1], reverse=True)[:top_index]
        positive_df = pd.DataFrame.from_dict(dict(ranked), orient="index")
        positive_df.columns = [col]
    if return_type is None or return_type == TopType.negative:
        # from small to big
        ranked = sorted(tops.items(), key=lambda item: item[1])[:top_index]
        negative_df = pd.DataFrame.from_dict(dict(ranked), orient="index")
        negative_df.columns = [col]

    if names:
        if pd_is_not_null(positive_df):
            positive_df["name"] = positive_df.index.map(lambda x: names[x])
        if pd_is_not_null(negative_df):
            negative_df["name"] = negative_df.index.map(lambda x: names[x])
    return positive_df, negative_df


def show_month_performance():
    """Plot the monthly top-performance series as a scatter chart."""
    dfs = []
    for timestamp, df in get_top_performance_by_month(start_timestamp="2005-01-01", list_days=250):
        if pd_is_not_null(df):
            df = df.reset_index(drop=True)
            df["entity_id"] = "stock_cn_performance"
            df["timestamp"] = timestamp
            dfs.append(df)

    all_df = pd.concat(dfs)
    print(all_df)

    drawer = Drawer(main_df=all_df)
    drawer.draw_scatter(show=True)


def show_industry_composition(entity_ids, timestamp):
    """Plot a pie chart of the industry blocks the given stocks belong to."""
    block_df = Block.query_data(provider="eastmoney", filters=[Block.category == "industry"], index="entity_id")
    block_ids = block_df.index.tolist()

    block_df = BlockStock.query_data(entity_ids=block_ids, filters=[BlockStock.stock_id.in_(entity_ids)])

    # count stocks per industry-block name
    s = block_df["name"].value_counts()

    cycle_df = pd.DataFrame(columns=s.index, data=[s.tolist()])
    cycle_df["entity_id"] = "stock_cn_industry"
    cycle_df["timestamp"] = timestamp
    drawer = Drawer(main_df=cycle_df)
    drawer.draw_pie(show=True)
def to_report_period_type(report_date):
    """
    Map a quarter-end report date to the matching ReportPeriod value.

    Returns None when the date is not one of the four standard
    Chinese report dates (03-31, 06-30, 09-30, 12-31).
    """
    ts = to_pd_timestamp(report_date)
    period_by_month_day = {
        (3, 31): ReportPeriod.season1.value,
        (6, 30): ReportPeriod.half_year.value,
        (9, 30): ReportPeriod.season3.value,
        (12, 31): ReportPeriod.year.value,
    }
    # dict.get defaults to None, matching the original fall-through
    return period_by_month_day.get((ts.month, ts.day))
def get_recent_report_date(the_date=None, step=0):
    """
    Return the most recent standard report date (as "YYYY-MM-DD") at or
    before ``the_date``, stepping back ``step`` additional report periods.

    :param the_date: reference date; defaults to "now", resolved per call
        (the old default ``now_pd_timestamp()`` was evaluated once at import)
    :param step: how many report periods to step further back (>= 0)
    """
    if the_date is None:
        the_date = now_pd_timestamp()
    the_date = to_pd_timestamp(the_date)
    assert step >= 0
    if the_date.month >= 10:
        recent = "{}{}".format(the_date.year, "-09-30")
    elif the_date.month >= 7:
        recent = "{}{}".format(the_date.year, "-06-30")
    elif the_date.month >= 4:
        recent = "{}{}".format(the_date.year, "-03-31")
    else:
        recent = "{}{}".format(the_date.year - 1, "-12-31")

    if step == 0:
        return recent
    # recurse: each step moves one report period further back
    return get_recent_report_date(recent, step - 1)


def get_recent_report_period(the_date=None, step=0):
    """Return the ReportPeriod value for the recent report date (see above)."""
    if the_date is None:
        the_date = now_pd_timestamp()
    return to_report_period_type(get_recent_report_date(the_date, step=step))


def get_china_exchange(code):
    """
    Map a China stock code to its exchange: "sh", "sz" or "bj".

    NOTE(review): 9xxxxx (SH B-share) codes fall into the "bj" branch here —
    presumably out of scope for this project; confirm before relying on it.
    """
    code_ = int(code)
    if 800000 >= code_ >= 600000:
        return "sh"
    elif code_ >= 400000:
        return "bj"
    else:
        return "sz"


def china_stock_code_to_id(code):
    """Build the canonical entity id "stock_{exchange}_{code}"."""
    return "{}_{}_{}".format("stock", get_china_exchange(code), code)


def value_to_pct(value, default=0):
    """Convert a percentage number (e.g. 12.5) to a ratio (0.125).

    NOTE: falsy values (including 0) return ``default`` — kept for
    backward compatibility.
    """
    return value / 100 if value else default


def value_multiply(value, multiplier, default=0):
    """Multiply ``value`` by ``multiplier``; falsy values return ``default``."""
    return value * multiplier if value else default


def float_to_pct_str(value):
    """Format a ratio as a percent string, e.g. 0.1234 -> "12.34%"."""
    return f"{round(value * 100, 2)}%"


def get_recent_report(data_schema: "Type[Mixin]", timestamp, entity_id=None, filters=None, max_step=2):
    """
    Query the most recent report rows, stepping back up to ``max_step``
    report periods until data is found.

    Returns None if nothing is found within ``max_step`` periods.
    """
    for i in range(max_step):
        report_date = get_recent_report_date(the_date=timestamp, step=i)
        # Build the filter list fresh each iteration. The original appended
        # to `filters` every pass, accumulating contradictory
        # `report_date == ...` conditions that could never all hold.
        step_filters = list(filters) if filters else []
        step_filters.append(data_schema.report_date == to_pd_timestamp(report_date))
        df = data_schema.query_data(entity_id=entity_id, filters=step_filters)
        if pd_is_not_null(df):
            return df
    return None
def all_sub_modules(dir_path: str):
    """
    List all module names in a specific directory.

    A module is either a sub-directory or a ``.py`` file (excluding
    ``__init__.py``); hidden names and names not starting with a letter
    are skipped.

    :param dir_path: directory to scan
    :return: list of module names
    """
    names = []
    for entry in os.scandir(dir_path):
        is_module_file = entry.path.endswith(".py") and not entry.path.endswith("__init__.py")
        if not (entry.is_dir() or is_module_file):
            continue
        name = os.path.splitext(os.path.basename(entry.path))[0]
        # ignore hidden entries and names that don't start with a letter
        if name.startswith(".") or not name[0].isalpha():
            continue
        names.append(name)
    return names


def _remove_start_end(line: str, start="class ", end="("):
    """Return the text between ``start`` and ``end`` in ``line``, else None."""
    if line.startswith(start) and (end in line):
        return line[len(start) : line.index(end)]
    if not start and (end in line):
        return line[: line.index(end)]


def _get_interface_name(line: str):
    """
    Get the class or function name defined on the given source line.

    :param line: the line of the source
    :return: the defined name, or None for non-definition lines
    """
    if line.startswith("class "):
        return _remove_start_end(line, "class ", "(")
    if line.startswith("def "):
        return _remove_start_end(line, "def ", "(")
def all_sub_all(sub_module):
    """Render the boilerplate that re-exports ``sub_module``'s __all__ from a package __init__."""
    return (
        f"\n\n# import all from submodule {sub_module}\n"
        f"from .{sub_module} import *\n"
        f"from .{sub_module} import __all__ as _{sub_module}_all\n"
        f"\n__all__ += _{sub_module}_all"
    )


def fill_package_if_not_exist(dir_path: str):
    """Recursively ensure every sub-directory under ``dir_path`` is a package."""
    fill_package(dir_path)
    for entry in os.scandir(dir_path):
        if not entry.is_dir():
            continue
        fill_package(entry.path)
        fill_package_if_not_exist(entry.path)


def fill_package(dir_path: str):
    """Create a minimal ``__init__.py`` in ``dir_path`` if it is a package dir without one."""
    base_name = os.path.basename(dir_path)
    # only directories whose name starts with a letter count as packages
    if not base_name[0].isalpha():
        return
    if not os.path.isdir(dir_path):
        return
    pkg_file = os.path.join(dir_path, "__init__.py")
    if os.path.exists(pkg_file):
        return
    with open(pkg_file, "w", encoding="utf-8") as outfile:
        outfile.write("# -*- coding: utf-8 -*-\n")
def gen_exports(
    dir_path="./domain",
    gen_flag="# the __all__ is generated",
    export_from_package=False,
    exclude_modules=None,
    export_modules=None,
    excludes=None,
    export_var=False,
):
    """
    Rewrite every python file under ``dir_path`` with a generated ``__all__``.

    Everything up to the first line starting with ``gen_flag`` is kept; the
    generated section is appended after it. For ``__init__.py`` files, when
    ``export_from_package`` is set, re-export boilerplate for sub modules is
    appended too.

    :param dir_path: a directory (processed recursively) or a single file
    :param gen_flag: marker line; content after it is regenerated
    :param export_from_package: also re-export sub modules from __init__.py
    :param exclude_modules: sub-module names to skip when re-exporting
    :param export_modules: whitelist of sub-module names to re-export
    :param excludes: names never exported (defaults to ["logger"])
    :param export_var: also export module-level variables (``name = ...``)
    """
    if not excludes:
        excludes = ["logger"]
    if os.path.isfile(dir_path):
        files = [dir_path]
    else:
        fill_package_if_not_exist(dir_path=dir_path)
        files = list_all_files(dir_path=dir_path)
    for file in files:
        exports = []
        lines = []
        # read and generate __all__
        with open(file, encoding="utf-8") as fp:
            line = fp.readline()
            while line:
                # stop collecting at the previously generated section
                if line.startswith(gen_flag):
                    break
                lines.append(line)
                export = _get_interface_name(line)
                if export_var and not export:
                    export = _get_var_name(line)
                # only public, non-excluded names are exported
                if export and export[0].isalpha() and export not in excludes:
                    exports.append(export)
                line = fp.readline()
        print(f"{file}:{exports}")
        # drop trailing blank lines before appending the generated section
        end_empty_lines_count = 0
        for i in range(-1, -len(lines) - 1, -1):
            if not lines[i].isspace():
                break
            end_empty_lines_count = end_empty_lines_count + 1
        lines = lines[: len(lines) - end_empty_lines_count]

        if not lines:
            lines.append("# -*- coding: utf-8 -*-#")

        lines.append("\n\n")
        lines.append(gen_flag)
        lines.append("\n")
        exports_str = f"__all__ = {exports}"
        exports_str = exports_str.replace("'", '"')
        # wrap long export lists one-name-per-line (keeps lines under 120 chars)
        if len(exports_str) > 120:
            exports_wrap = [f'\n    "{item}",' for item in exports]
            exports_str = "__all__ = [" + "".join(exports_wrap) + "\n]"
            exports_str = exports_str.replace("'", '"')
        lines.append(exports_str)
        lines.append("\n")

        # the package module
        if export_from_package:
            basename = os.path.basename(file)
            if basename == "__init__.py":
                dir_path = os.path.dirname(file)
                modules = all_sub_modules(dir_path)
                if modules:
                    if exclude_modules:
                        modules = set(modules) - set(exclude_modules)
                    if export_modules:
                        modules = set(modules) & set(export_modules)
                    lines.append(
                        """
# __init__.py structure:
# common code of the package
# export interface in __all__ which contains __all__ of its sub modules"""
                    )
                    for mod in modules:
                        lines.append(all_sub_all(mod))
                    lines.append("\n")

        # write with __all__
        with open(file, mode="w", encoding="utf-8") as fp:
            fp.writelines(lines)
def gen_kdata_schema(
    pkg: str,
    providers: List[str],
    entity_type: str,
    levels: List[IntervalLevel],
    adjust_types=None,
    entity_in_submodule: bool = False,
    kdata_module="quotes",
):
    """
    Generate one kdata schema file per (level, adjust_type) combination.

    Naming rule:
    1) class name: {entity_type.capitalize()}{level.value.capitalize()}Kdata
    2) one db file per schema

    :param pkg: package name used in the generated imports
    :param providers: providers registered for the generated schemas
    :param entity_type: entity type, e.g. "stock"
    :param levels: interval levels to generate
    :param adjust_types: adjust types; [None] by default
    :param entity_in_submodule: nest output under a per-entity sub-directory
    :param kdata_module: quotes module name in the target package
    """
    if adjust_types is None:
        adjust_types = [None]
    tables = []

    base_path = "./domain"

    if kdata_module:
        base_path = os.path.join(base_path, kdata_module)
    if entity_in_submodule:
        base_path = os.path.join(base_path, entity_type)

    if not os.path.exists(base_path):
        logger.info(f"create dir {base_path}")
        os.makedirs(base_path)

    providers_str = f"{providers}".replace("'", '"')
    for level in levels:
        for adjust_type in adjust_types:
            level = IntervalLevel(level)

            cap_entity_type = entity_type.capitalize()
            cap_level = level.value.capitalize()

            # you should define {EntityType}KdataCommon in kdata_module at first
            kdata_common = f"{cap_entity_type}KdataCommon"

            # qfq (forward-adjusted) is the default and gets the short name
            if adjust_type and (adjust_type != AdjustType.qfq):
                class_name = f"{cap_entity_type}{cap_level}{adjust_type.value.capitalize()}Kdata"
                table_name = f"{entity_type}_{level.value}_{adjust_type.value.lower()}_kdata"
            else:
                class_name = f"{cap_entity_type}{cap_level}Kdata"
                table_name = f"{entity_type}_{level.value}_kdata"

            tables.append(table_name)

            schema_template = f"""# -*- coding: utf-8 -*-
# this file is generated by gen_kdata_schema function, dont't change it
from sqlalchemy.orm import declarative_base

from zvt.contract.register import register_schema
from {pkg}.domain.{kdata_module} import {kdata_common}

KdataBase = declarative_base()


class {class_name}(KdataBase, {kdata_common}):
    __tablename__ = "{table_name}"


register_schema(providers={providers_str}, db_name="{table_name}", schema_base=KdataBase, entity_type="{entity_type}")

"""
            # generate the schema
            with open(os.path.join(base_path, f"{table_name}.py"), "w", encoding="utf-8") as outfile:
                outfile.write(schema_template)

    # generate the package
    pkg_file = os.path.join(base_path, "__init__.py")
    if not os.path.exists(pkg_file):
        package_template = """# -*- coding: utf-8 -*-
"""
        with open(pkg_file, "w", encoding="utf-8") as outfile:
            outfile.write(package_template)

    # generate exports
    gen_exports("./domain")
def gen_plugin_project(entity_type, prefix: str = "zvt", dir_path: str = ".", providers=None):
    """
    Generate a standard plugin project.

    :param entity_type: the entity type of the plugin project
    :param prefix: project prefix
    :param dir_path: the root path for the project
    :param providers: the supported providers; defaults to ["joinquant"]
    """
    # None sentinel instead of a mutable default list, which would be
    # shared across calls.
    if providers is None:
        providers = ["joinquant"]

    # generate project files
    project = f"{prefix}_{entity_type}"
    entity_class = entity_type.capitalize()
    project_path = os.path.join(dir_path, project)
    if not os.path.exists(project_path):
        os.makedirs(project_path)

    current_time = now_pd_timestamp()
    user_name = get_git_user_name()
    user_email = get_git_user_email()

    for file_name, template in all_tpls(project=project, entity_type=entity_type):
        tpl_content = template.safe_substitute(
            project=project,
            entity_type=entity_type,
            entity_class=entity_class,
            providers=providers,
            provider=providers[0],
            Provider=providers[0].capitalize(),
            year=current_time.year,
            user=user_name,
            email=user_email,
        )
        file_path = os.path.join(project_path, file_name)

        file_dir = os.path.dirname(file_path)
        if not os.path.exists(file_dir):
            os.makedirs(file_dir)

        with open(file_path, "w", encoding="utf-8") as fh:
            fh.write(tpl_content)
a/zvt/autocode/templates/MANIFEST.in.template b/src/zvt/autocode/templates/MANIFEST.in.template similarity index 100% rename from zvt/autocode/templates/MANIFEST.in.template rename to src/zvt/autocode/templates/MANIFEST.in.template diff --git a/zvt/autocode/templates/README-en.md.template b/src/zvt/autocode/templates/README-en.md.template similarity index 88% rename from zvt/autocode/templates/README-en.md.template rename to src/zvt/autocode/templates/README-en.md.template index 79795195..af5924b9 100644 --- a/zvt/autocode/templates/README-en.md.template +++ b/src/zvt/autocode/templates/README-en.md.template @@ -3,7 +3,6 @@ [![image](https://img.shields.io/pypi/l/${project}.svg)](https://pypi.org/project/${project}/) [![image](https://img.shields.io/pypi/pyversions/${project}.svg)](https://pypi.org/project/${project}/) [![Build Status](https://api.travis-ci.org/zvtvz/${project}.svg?branch=master)](https://travis-ci.org/zvtvz/${project}) -[![HitCount](http://hits.dwyl.com/zvtvz/${project}.svg)](http://hits.dwyl.com/zvtvz/${project}) ### introduction diff --git a/zvt/autocode/templates/README.md.template b/src/zvt/autocode/templates/README.md.template similarity index 88% rename from zvt/autocode/templates/README.md.template rename to src/zvt/autocode/templates/README.md.template index be4d98aa..adfef914 100644 --- a/zvt/autocode/templates/README.md.template +++ b/src/zvt/autocode/templates/README.md.template @@ -3,7 +3,6 @@ [![image](https://img.shields.io/pypi/l/${project}.svg)](https://pypi.org/project/${project}/) [![image](https://img.shields.io/pypi/pyversions/${project}.svg)](https://pypi.org/project/${project}/) [![Build Status](https://api.travis-ci.org/zvtvz/${project}.svg?branch=master)](https://travis-ci.org/zvtvz/${project}) -[![HitCount](http://hits.dwyl.com/zvtvz/${project}.svg)](http://hits.dwyl.com/zvtvz/${project}) ### 说明 diff --git a/src/zvt/autocode/templates/__init__.py b/src/zvt/autocode/templates/__init__.py new file mode 100644 index 
def all_tpls(project: str, entity_type: str):
    """
    Return list of templates as (target_location, string.Template) tuples.

    Template files live next to this module with a ``.template`` extension;
    each is mapped to its destination path inside the generated project.

    :param project: generated project name, used in destination paths
    :param entity_type: entity type, used in per-entity file names
    :return: list of (location, Template) tuples
    """
    tpl_dir = os.path.join(os.path.dirname(__file__))
    tpl_files = list_all_files(tpl_dir, ext="template", return_base_name=True)
    tpls = []
    for tpl in tpl_files:
        # NOTE(review): pkg_resources is deprecated upstream; consider
        # importlib.resources when the minimum Python allows it.
        data = resource_string(__name__, tpl)
        file_location = os.path.splitext(os.path.basename(tpl))[0]
        # we assure that line endings are converted to '\n' for all OS
        data = data.decode(encoding="utf-8").replace(os.linesep, "\n")

        # change path for specific file
        # domain
        if file_location == "kdata_common.py":
            file_location = f"{project}/domain/quotes/__init__.py"
        elif file_location == "meta.py":
            file_location = f"{project}/domain/{entity_type}_meta.py"
        # recorder
        elif file_location == "kdata_recorder.py":
            file_location = f"{project}/recorders/{entity_type}_kdata_recorder.py"
        elif file_location == "meta_recorder.py":
            file_location = f"{project}/recorders/{entity_type}_meta_recorder.py"
        # fill script
        elif file_location == "fill_project.py":
            file_location = f"{project}/fill_project.py"
        # tests
        elif file_location == "test_pass.py":
            file_location = f"tests/test_pass.py"
        elif file_location == "pkg_init.py":
            file_location = f"{project}/__init__.py"
        # any other template keeps its own base name at the project root

        tpls.append((file_location, string.Template(data)))
    return tpls
b/src/zvt/autocode/templates/kdata_common.py.template similarity index 100% rename from zvt/autocode/templates/kdata_common.py.template rename to src/zvt/autocode/templates/kdata_common.py.template diff --git a/zvt/autocode/templates/kdata_recorder.py.template b/src/zvt/autocode/templates/kdata_recorder.py.template similarity index 64% rename from zvt/autocode/templates/kdata_recorder.py.template rename to src/zvt/autocode/templates/kdata_recorder.py.template index 2c1f492d..cd2c6f61 100644 --- a/zvt/autocode/templates/kdata_recorder.py.template +++ b/src/zvt/autocode/templates/kdata_recorder.py.template @@ -3,7 +3,7 @@ # -*- coding: utf-8 -*- from zvt import IntervalLevel -from zvt.api import get_kdata_schema +from zvt.api.kdata import get_kdata_schema from zvt.contract.recorder import FixedCycleDataRecorder from ${project}.domain import ${entity_class}, ${entity_class}KdataCommon @@ -18,15 +18,15 @@ class ${Provider}${entity_class}KdataRecorder(FixedCycleDataRecorder): # register the recorder to data_schema data_schema = ${entity_class}KdataCommon - def __init__(self, entity_type='${entity_type}', exchanges=None, entity_ids=None, codes=None, day_data=False, batch_size=10, - force_update=True, sleeping_time=10, entity_filters=None,default_size=2000, real_time=False, fix_duplicate_way='ignore', - start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0, level=IntervalLevel.LEVEL_1DAY, + def __init__(self, entity_type='${entity_type}', exchanges=None, entity_ids=None, codes=None, day_data=False, + force_update=True, sleeping_time=10, entity_filters=None, real_time=False, fix_duplicate_way='ignore', + start_timestamp=None, end_timestamp=None, level=IntervalLevel.LEVEL_1DAY, kdata_use_begin_time=False, one_day_trading_minutes=24 * 60) -> None: level = IntervalLevel(level) self.data_schema = get_kdata_schema(entity_type=entity_type, level=level, adjust_type=None) - super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, 
force_update, sleeping_time, - entity_filters, default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, + super().__init__(entity_type, exchanges, entity_ids, codes, day_data, force_update, sleeping_time, + entity_filters, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, close_minute, level, kdata_use_begin_time, one_day_trading_minutes) def record(self, entity, start, end, size, timestamps): diff --git a/zvt/autocode/templates/meta.py.template b/src/zvt/autocode/templates/meta.py.template similarity index 91% rename from zvt/autocode/templates/meta.py.template rename to src/zvt/autocode/templates/meta.py.template index f2c328d1..6dd582ca 100644 --- a/zvt/autocode/templates/meta.py.template +++ b/src/zvt/autocode/templates/meta.py.template @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- from sqlalchemy import Column, DateTime, Boolean -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract import EntityMixin from zvt.contract.register import register_schema, register_entity diff --git a/zvt/autocode/templates/meta_recorder.py.template b/src/zvt/autocode/templates/meta_recorder.py.template similarity index 100% rename from zvt/autocode/templates/meta_recorder.py.template rename to src/zvt/autocode/templates/meta_recorder.py.template diff --git a/zvt/autocode/templates/pkg_init.py.template b/src/zvt/autocode/templates/pkg_init.py.template similarity index 100% rename from zvt/autocode/templates/pkg_init.py.template rename to src/zvt/autocode/templates/pkg_init.py.template diff --git a/zvt/autocode/templates/requirements.txt.template b/src/zvt/autocode/templates/requirements.txt.template similarity index 100% rename from zvt/autocode/templates/requirements.txt.template rename to src/zvt/autocode/templates/requirements.txt.template diff --git a/src/zvt/autocode/templates/setup.py.template b/src/zvt/autocode/templates/setup.py.template new file mode 100644 
index 00000000..40c50a66 --- /dev/null +++ b/src/zvt/autocode/templates/setup.py.template @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# To use a consistent encoding +from codecs import open +from os import path + +# Always prefer setuptools over distutils +from setuptools import setup, find_packages + +try: + # for pip >= 10 + from pip._internal.req import parse_requirements +except ImportError: + # for pip <= 9.0.3 + from pip.req import parse_requirements + +here = path.abspath(path.dirname(__file__)) + +# Get the long description from the README file +with open(path.join(here, "README.md"), encoding="utf-8") as f: + long_description = f.read() + +# Arguments marked as "Required" below must be included for upload to PyPI. +# Fields marked as "Optional" may be commented out. + +install_reqs = parse_requirements("requirements.txt", session=False) + +try: + requirements = [str(ir.req) for ir in install_reqs] +except: + requirements = [str(ir.requirement) for ir in install_reqs] + +setup( + name="${project}", + version="0.0.1", + description="unified,modular quant framework for human beings ", + long_description=long_description, + url="https://github.com/zvtvz/${project}", + author="${user}", + author_email="${email}", + classifiers=[ # Optional + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Intended Audience :: Customer Service", + "Intended Audience :: Education", + "Intended Audience :: Financial and Insurance Industry", + "Topic :: Software Development :: Build Tools", + "Topic :: Office/Business :: Financial :: Investment", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + ], + keywords="quant stock finance fintech big-data zvt ma-analysis trading-platform pandas fundamental-analysis", + packages=find_packages(include=["${project}.*", "${project}"]), + python_requires=">=3.5, <4", + 
include_package_data=True, + install_requires=requirements, + project_urls={ + "Bug Reports": "https://github.com/zvtvz/${project}/issues", + "Funding": "https://github.com/zvtvz/${project}", + "Say Thanks!": "https://saythanks.io/to/foolcage", + "Source": "https://github.com/zvtvz/${project}", + }, + long_description_content_type="text/markdown", +) diff --git a/zvt/autocode/templates/test_pass.py.template b/src/zvt/autocode/templates/test_pass.py.template similarity index 100% rename from zvt/autocode/templates/test_pass.py.template rename to src/zvt/autocode/templates/test_pass.py.template diff --git a/src/zvt/broker/__init__.py b/src/zvt/broker/__init__.py new file mode 100644 index 00000000..2686fff0 --- /dev/null +++ b/src/zvt/broker/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] diff --git a/src/zvt/broker/qmt/__init__.py b/src/zvt/broker/qmt/__init__.py new file mode 100644 index 00000000..2686fff0 --- /dev/null +++ b/src/zvt/broker/qmt/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] diff --git a/src/zvt/broker/qmt/context.py b/src/zvt/broker/qmt/context.py new file mode 100644 index 00000000..995b035d --- /dev/null +++ b/src/zvt/broker/qmt/context.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +from typing import Optional + +from zvt import zvt_env +from zvt.broker.qmt.qmt_account import QmtStockAccount + + +class QmtContext(object): + def __init__(self): + self.qmt_account: Optional[QmtStockAccount] = None + + +qmt_context = QmtContext() + + +def init_qmt_account(qmt_mini_data_path=None, qmt_account_id=None): + if not qmt_mini_data_path: + qmt_mini_data_path = zvt_env["qmt_mini_data_path"] + if not qmt_account_id: + qmt_account_id = zvt_env["qmt_account_id"] + qmt_context.qmt_account = QmtStockAccount( + path=qmt_mini_data_path, account_id=qmt_account_id, trader_name="zvt", session_id=None + ) + + +init_qmt_account() + + +# the __all__ is generated 
class TraderError(Exception):
    """Base class for trading errors raised by the qmt broker integration."""

    pass


class QmtError(TraderError):
    """Generic qmt client/trader failure (connect, subscribe, insufficient cash...)."""

    def __init__(self, message="qmt error"):
        # BUG fix: call Exception.__init__ so str(e), logging and tracebacks
        # actually show the message (previously str(QmtError(...)) was empty).
        super().__init__(message)
        self.message = message


class PositionOverflowError(TraderError):
    """Requested buy would exceed the configured position limit."""

    def __init__(self, message="超出仓位限制"):
        super().__init__(message)
        self.message = message
def on_account_status(self, status):
    """
    Account status change push.

    :param status: XtAccountStatus object
    :return: None
    """
    # BUG fix: logging treats the first positional arg as the %-format string;
    # passing three bare args raised a formatting error inside the handler.
    # Use lazy %-style args so the three fields are rendered correctly.
    logger.info("qmt on_account_status: %s %s %s", status.account_id, status.account_type, status.status)
def on_trading_signals(self, trading_signals: List[TradingSignal]):
    """Dispatch each signal to handle_trading_signal; one failing signal must not stop the rest."""
    for signal in trading_signals:
        try:
            self.handle_trading_signal(signal)
        except Exception as e:
            # log with stack trace and notify the error hook, then continue
            logger.exception(e)
            self.on_trading_error(timestamp=signal.happen_timestamp, error=e)
def handle_trading_signal(self, trading_signal: TradingSignal):
    """Turn a single TradingSignal into a fixed-price qmt order; expired signals are dropped."""
    entity_id = trading_signal.entity_id
    happen_timestamp = trading_signal.happen_timestamp
    order_type = trading_signal_type_to_order_type(trading_signal.trading_signal_type)
    # currently unused, kept for parity with the original implementation
    trading_level = trading_signal.trading_level.value
    # askPrice 多档委卖价
    # bidPrice 多档委买价
    # askVol 多档委卖量
    # bidVol 多档委买量
    if now_pd_timestamp() > to_pd_timestamp(trading_signal.due_timestamp):
        logger.warning(
            f"the signal is expired, now {now_pd_timestamp()} is after due time: {trading_signal.due_timestamp}"
        )
        return
    l2_quote = xtdata.get_l2_quote(stock_code=_to_qmt_code(entity_id=entity_id), start_time=happen_timestamp)
    # NOTE(review): askPrice/bidPrice look like multi-level arrays in qmt l2 data —
    # confirm order_stock accepts this value as a single price.
    if order_type == OrderType.order_long:
        price = l2_quote["askPrice"]
    elif order_type == OrderType.order_close_long:
        price = l2_quote["bidPrice"]
    else:
        assert False
    self.order_by_amount(
        entity_id=entity_id,
        order_price=price,
        order_timestamp=happen_timestamp,
        order_type=order_type,
        order_amount=trading_signal.order_amount,
    )
def buy(self, buy_parameter: BuyParameter):
    """
    Buy a basket of stocks, splitting a budget across them.

    Budget resolution order: explicit money_to_use; otherwise derived from
    position_pct against total assets (PositionType.normal, capped by current
    position) or against available cash (PositionType.cash).

    :param buy_parameter: entity ids, position sizing and optional per-stock weights
    :raises QmtError: insufficient cash or unsupported position type
    :raises PositionOverflowError: current position already exceeds the requested pct
    """
    # account fields (from qmt): cash 可用金额 / frozen_cash 冻结金额 /
    # market_value 持仓市值 / total_asset 总资产
    acc = self.get_current_account()

    # 优先使用金额下单
    if buy_parameter.money_to_use:
        money_to_use = buy_parameter.money_to_use
        if acc.cash < money_to_use:
            raise QmtError(f"可用余额不足 {acc.cash} < {money_to_use}")
    else:
        # 检查仓位
        if buy_parameter.position_type == PositionType.normal:
            current_pct = round(acc.market_value / acc.total_asset, 2)
            if current_pct >= buy_parameter.position_pct:
                raise PositionOverflowError(f"目前仓位为{current_pct}, 已超过请求的仓位: {buy_parameter.position_pct}")
            money_to_use = acc.total_asset * (buy_parameter.position_pct - current_pct)
        elif buy_parameter.position_type == PositionType.cash:
            money_to_use = acc.cash * buy_parameter.position_pct
        else:
            # was `assert False`, which is stripped under python -O
            raise QmtError(f"unsupported position_type: {buy_parameter.position_type}")

    stock_codes = [_to_qmt_code(entity_id) for entity_id in buy_parameter.entity_ids]
    ticks = xtdata.get_full_tick(code_list=stock_codes)

    stocks_count = len(stock_codes)
    if not buy_parameter.weights:
        money_for_stocks = [round(money_to_use / stocks_count)] * stocks_count
    else:
        # BUG fix: the weighted split previously computed round(weight / weights_sum),
        # i.e. it forgot to multiply by money_to_use and allocated ~1 yuan per stock.
        weights_sum = sum(buy_parameter.weights)
        money_for_stocks = [round(money_to_use * weight / weights_sum) for weight in buy_parameter.weights]

    for i, stock_code in enumerate(stock_codes):
        try_price = ticks[stock_code]["askPrice"][3]
        # BUG fix: order_volume must be an integer share count; A-shares trade in
        # board lots of 100, so round the budget down to whole lots.
        volume = int(money_for_stocks[i] // (try_price * 100)) * 100
        if volume <= 0:
            logger.warning(f"money {money_for_stocks[i]} not enough for one lot of {stock_code} at {try_price}")
            continue
        fix_result_order_id = self.xt_trader.order_stock(
            account=self.account,
            stock_code=stock_code,
            order_type=xtconstant.STOCK_BUY,
            order_volume=volume,
            price_type=xtconstant.MARKET_SH_CONVERT_5_CANCEL,
            price=0,
            strategy_name=self.trader_name,
            order_remark="order from zvt",
        )
        logger.info(f"order result id: {fix_result_order_id}")
) + logger.info(f"order result id: {fix_result_order_id}") + + +if __name__ == "__main__": + account = QmtStockAccount(path=r"D:\qmt\userdata_mini", account_id="") + account.get_positions() + + +# the __all__ is generated +__all__ = ["MyXtQuantTraderCallback", "QmtStockAccount"] diff --git a/src/zvt/broker/qmt/qmt_quote.py b/src/zvt/broker/qmt/qmt_quote.py new file mode 100644 index 00000000..454f9c5f --- /dev/null +++ b/src/zvt/broker/qmt/qmt_quote.py @@ -0,0 +1,348 @@ +# -*- coding: utf-8 -*- +import logging +import time + +import numpy as np +import pandas as pd +from xtquant import xtdata + +from zvt.contract import Exchange +from zvt.contract import IntervalLevel, AdjustType +from zvt.contract.api import decode_entity_id, df_to_db, get_db_session +from zvt.domain import StockQuote, Stock, Stock1dKdata +from zvt.domain.quotes.stock.stock_quote import Stock1mQuote, StockQuoteLog +from zvt.recorders.em import em_api +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import ( + to_time_str, + current_date, + to_pd_timestamp, + now_pd_timestamp, + TIME_FORMAT_MINUTE, + date_time_by_interval, + TIME_FORMAT_MINUTE2, + now_timestamp, +) + +# https://dict.thinktrader.net/nativeApi/start_now.html?id=e2M5nZ + +logger = logging.getLogger(__name__) + + +def _to_qmt_code(entity_id): + _, exchange, code = decode_entity_id(entity_id=entity_id) + return f"{code}.{exchange.upper()}" + + +def _to_zvt_entity_id(qmt_code): + code, exchange = qmt_code.split(".") + exchange = exchange.lower() + return f"stock_{exchange}_{code}" + + +def _to_qmt_dividend_type(adjust_type: AdjustType): + if adjust_type == AdjustType.qfq: + return "front" + elif adjust_type == AdjustType.hfq: + return "back" + else: + return "none" + + +def _qmt_instrument_detail_to_stock(stock_detail): + exchange = stock_detail["ExchangeID"].lower() + code = stock_detail["InstrumentID"] + name = stock_detail["InstrumentName"] + list_date = to_pd_timestamp(stock_detail["OpenDate"]) + try: + 
def get_qmt_stocks():
    """All tradable A-share codes in qmt form: SH/SZ sector list plus BJ codes from the em provider."""
    # Beijing exchange is not in qmt's 沪深A股 sector, so fetch it from em and convert
    bj_df = em_api.get_tradable_list(exchange=Exchange.bj)
    bj_codes = [_to_qmt_code(entity_id) for entity_id in bj_df["entity_id"]]
    return xtdata.get_stock_list_in_sector("沪深A股") + bj_codes
def get_kdata(
    entity_id,
    start_timestamp,
    end_timestamp,
    level=IntervalLevel.LEVEL_1DAY,
    adjust_type=AdjustType.qfq,
    download_history=True,
):
    """
    Fetch kdata for one entity from qmt and return it as a single DataFrame.

    :param entity_id: zvt entity id, e.g. stock_sz_000001
    :param start_timestamp: range start
    :param end_timestamp: range end
    :param level: bar interval (qmt period is the enum value)
    :param adjust_type: price adjustment (qfq/hfq/bfq)
    :param download_history: download data first; slow, better done by a dedicated job
    :return: DataFrame with one column per qmt field, volume converted to shares
    """
    code = _to_qmt_code(entity_id=entity_id)
    period = level.value
    # format once and reuse (was formatted a second time for get_market_data)
    start_time = to_time_str(start_timestamp, fmt="YYYYMMDDHHmmss")
    end_time = to_time_str(end_timestamp, fmt="YYYYMMDDHHmmss")
    # download比较耗时,建议单独定时任务来做
    if download_history:
        # use the module logger instead of print, consistent with the rest of this module
        logger.info(f"download from {start_time} to {end_time}")
        xtdata.download_history_data(stock_code=code, period=period, start_time=start_time, end_time=end_time)
    records = xtdata.get_market_data(
        stock_list=[code],
        period=period,
        start_time=start_time,
        end_time=end_time,
        dividend_type=_to_qmt_dividend_type(adjust_type=adjust_type),
        fill_data=False,
    )

    # each qmt field comes back as its own (stock x time) frame; transpose and join
    dfs = []
    for col in records:
        field_df = records[col].T
        field_df.columns = [col]
        dfs.append(field_df)
    df = pd.concat(dfs, axis=1)
    # qmt volume is in lots (手, 100 shares) — convert to shares
    df["volume"] = df["volume"] * 100
    return df
def calculate_limit_up_amount(row):
    """Amount (CNY) queued at limit-up: price * best-bid volume in lots of 100; None when not limit-up."""
    if not row["is_limit_up"]:
        return None
    return row["price"] * row["bidVol"][0] * 100
def clear_history_quote():
    """Prune stale realtime data: keep only today's StockQuote and the last 10 days of 1m/log quotes."""
    session = get_db_session("qmt", data_schema=StockQuote)
    today = current_date()
    session.query(StockQuote).filter(StockQuote.timestamp < today).delete()
    cutoff = date_time_by_interval(today, -10)
    for schema in (Stock1mQuote, StockQuoteLog):
        session.query(schema).filter(schema.timestamp < cutoff).delete()
    session.commit()
def record_tick():
    """
    Subscribe to whole-market ticks and convert them to quotes until ~15:10.

    Clears stale quote tables, refreshes the stock list, subscribes via qmt,
    then blocks polling the qmt client connection every 3s.

    :raises Exception: when the qmt quote service connection drops
    """
    clear_history_quote()
    Stock.record_data(provider="em")
    stocks = get_qmt_stocks()
    logger.info(f"subscribe tick for {len(stocks)} stocks")
    sid = xtdata.subscribe_whole_quote(stocks, callback=tick_to_quote())

    # 阻塞线程接收行情回调 (time is already imported at module level)
    client = xtdata.get_client()
    try:
        while True:
            time.sleep(3)
            if not client.is_connected():
                raise Exception("行情服务连接断开")
            current_timestamp = now_pd_timestamp()
            # BUG fix: `hour >= 15 and minute >= 10` never fired between 16:00-16:09
            # (minute < 10), leaving the loop running; compare (hour, minute) >= (15, 10).
            if (current_timestamp.hour, current_timestamp.minute) >= (15, 10):
                logger.info(f"record tick finished at: {current_timestamp}")
                break
    finally:
        # always release the subscription, even if the connection check raised
        xtdata.unsubscribe_quote(sid)
class PositionType(Enum):
    """How BuyParameter.position_pct is interpreted when sizing a buy."""

    # pct measured against total account assets (overall position)
    normal = "normal"

    # pct measured against available cash only, ignoring the overall position
    cash = "cash"
# zvt home dir, overridable via the ZVT_HOME environment variable
ZVT_HOME = os.environ.get("ZVT_HOME") or os.path.abspath(os.path.join(Path.home(), "zvt-home"))

# data for testing
ZVT_TEST_HOME = os.path.abspath(os.path.join(Path.home(), "zvt-test-home"))
ZVT_TEST_ZIP_DATA_PATH = os.path.join(ZVT_TEST_HOME, "data.zip")
ZVT_TEST_DATA_PATH = os.path.join(ZVT_TEST_HOME, "data")

# bundled sample data shipped with the package
DATA_SAMPLE_ZIP_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "samples", "data.zip"))

# ****** setting for stocks ****** #
SAMPLE_STOCK_CODES = ["000001", "000002"]

# sample ETFs: HS300, securities, CSI500, SSE50, ChiNext, defense, media, resources
SAMPLE_ETF_CODES = ["510300", "512880", "510500", "510050", "159915", "512660", "512980", "510410"]

# widely watched indices: SSE composite(000001), SSE50(000016), HS300(000300),
# CSI500(000905), CSI1000(000852), STAR50(000688), SZ component(399001),
# ChiNext(399006), CNI growth(399370), value(399371), fund(399379), ETF(399380)
IMPORTANT_INDEX = [
    "000001",
    "000016",
    "000300",
    "000905",
    "000852",
    "000688",
    "399001",
    "399006",
    "399370",
    "399371",
    "399379",
    "399380",
]
+ """ + + #: level l2 quote + LEVEL_L2_QUOTE = "l2quote" + #: level tick + LEVEL_TICK = "tick" + #: 1 minute + LEVEL_1MIN = "1m" + #: 5 minutes + LEVEL_5MIN = "5m" + #: 15 minutes + LEVEL_15MIN = "15m" + #: 30 minutes + LEVEL_30MIN = "30m" + #: 1 hour + LEVEL_1HOUR = "1h" + #: 4 hours + LEVEL_4HOUR = "4h" + #: 1 day + LEVEL_1DAY = "1d" + #: 1 week + LEVEL_1WEEK = "1wk" + #: 1 month + LEVEL_1MON = "1mon" + + def to_pd_freq(self): + if self == IntervalLevel.LEVEL_1MIN: + return "1min" + if self == IntervalLevel.LEVEL_5MIN: + return "5min" + if self == IntervalLevel.LEVEL_15MIN: + return "15min" + if self == IntervalLevel.LEVEL_30MIN: + return "30min" + if self == IntervalLevel.LEVEL_1HOUR: + return "1H" + if self == IntervalLevel.LEVEL_4HOUR: + return "4H" + if self >= IntervalLevel.LEVEL_1DAY: + return "1D" + + def floor_timestamp(self, pd_timestamp): + if self == IntervalLevel.LEVEL_1MIN: + return pd_timestamp.floor("1min") + if self == IntervalLevel.LEVEL_5MIN: + return pd_timestamp.floor("5min") + if self == IntervalLevel.LEVEL_15MIN: + return pd_timestamp.floor("15min") + if self == IntervalLevel.LEVEL_30MIN: + return pd_timestamp.floor("30min") + if self == IntervalLevel.LEVEL_1HOUR: + return pd_timestamp.floor("1h") + if self == IntervalLevel.LEVEL_4HOUR: + return pd_timestamp.floor("4h") + if self == IntervalLevel.LEVEL_1DAY: + return pd_timestamp.floor("1d") + + def to_minute(self): + return int(self.to_second() / 60) + + def to_second(self): + return int(self.to_ms() / 1000) + + def to_ms(self): + """ + To seconds count in the interval + + :return: seconds count in the interval + """ + #: we treat tick intervals is 5s, you could change it + if self == IntervalLevel.LEVEL_TICK: + return 5 * 1000 + if self == IntervalLevel.LEVEL_1MIN: + return 60 * 1000 + if self == IntervalLevel.LEVEL_5MIN: + return 5 * 60 * 1000 + if self == IntervalLevel.LEVEL_15MIN: + return 15 * 60 * 1000 + if self == IntervalLevel.LEVEL_30MIN: + return 30 * 60 * 1000 + if self == 
IntervalLevel.LEVEL_1HOUR: + return 60 * 60 * 1000 + if self == IntervalLevel.LEVEL_4HOUR: + return 4 * 60 * 60 * 1000 + if self == IntervalLevel.LEVEL_1DAY: + return 24 * 60 * 60 * 1000 + if self == IntervalLevel.LEVEL_1WEEK: + return 7 * 24 * 60 * 60 * 1000 + if self == IntervalLevel.LEVEL_1MON: + return 31 * 7 * 24 * 60 * 60 * 1000 + + def __ge__(self, other): + if self.__class__ is other.__class__: + return self.to_ms() >= other.to_ms() + return NotImplemented + + def __gt__(self, other): + + if self.__class__ is other.__class__: + return self.to_ms() > other.to_ms() + return NotImplemented + + def __le__(self, other): + if self.__class__ is other.__class__: + return self.to_ms() <= other.to_ms() + return NotImplemented + + def __lt__(self, other): + if self.__class__ is other.__class__: + return self.to_ms() < other.to_ms() + return NotImplemented + + +class AdjustType(Enum): + """ + split-adjusted type for :class:`~.zvt.contract.schema.TradableEntity` quotes + + """ + + #: not adjusted + #: 不复权 + bfq = "bfq" + #: pre adjusted + #: 前复权 + qfq = "qfq" + #: post adjusted + #: 后复权 + hfq = "hfq" + + +class ActorType(Enum): + #: 个人 + individual = "individual" + #: 公募基金 + raised_fund = "raised_fund" + #: 社保 + social_security = "social_security" + #: 保险 + insurance = "insurance" + #: 外资 + qfii = "qfii" + #: 信托 + trust = "trust" + #: 券商 + broker = "qmt" + #: 私募 + private_equity = "private_equity" + #: 公司(可能包括私募) + corporation = "corporation" + + +class TradableType(Enum): + #: A股(中国) + #: China stock + stock = "stock" + #: 可转债(中国) + #: China convertible Bond + cbond = "cbond" + #: A股指数(中国) + #: China index + index = "index" + #: A股板块(中国) + #: China stock block + block = "block" + #: 美股 + #: USA stock + stockus = "stockus" + #: 美股指数 + #: USA index + indexus = "indexus" + #: 港股 + #: Hongkong Stock + stockhk = "stockhk" + #: 期货(中国) + #: China future + future = "future" + #: 数字货币 + #: Cryptocurrency + coin = "coin" + #: 期权(中国) + #: China option + option = "option" + #: 
def get_entity_exchanges(entity_type):
    """Return the exchanges a tradable type is listed on, or None for unmapped types."""
    # accepts either a TradableType or its string value
    return tradable_type_map_exchanges.get(TradableType(entity_type))
def _get_db_name(data_schema: DeclarativeMeta) -> str:
    """
    get db name of the domain schema

    :param data_schema: the data schema
    :return: db name, or None when the schema belongs to no registered base
    """
    # first registered declarative base the schema subclasses wins
    return next(
        (db_name for db_name, base in zvt_context.dbname_map_base.items() if issubclass(data_schema, base)),
        None,
    )
def get_schemas(provider: str) -> List[DeclarativeMeta]:
    """
    get domain schemas supported by the provider

    :param provider: data provider
    :return: schemas provided by the provider (empty list for an unknown provider)
    """
    schemas = []
    # direct dict lookup instead of scanning every (provider, dbs) pair
    for dbname in zvt_context.provider_map_dbnames.get(provider, []):
        schemas1 = zvt_context.dbname_map_schemas.get(dbname)
        if schemas1:
            schemas += schemas1
    return schemas
def common_filter(
    query: Query,
    data_schema,
    start_timestamp=None,
    end_timestamp=None,
    filters=None,
    order=None,
    limit=None,
    distinct=None,
    time_field="timestamp",
):
    """
    build filter by the arguments

    :param query: sql query
    :param data_schema: data schema
    :param start_timestamp: start timestamp
    :param end_timestamp: end timestamp
    :param filters: sql filters
    :param order: sql order
    :param limit: sql limit size
    :param distinct: distinct expression, applied only when truthy
    :param time_field: time field in columns
    :return: result query
    """
    assert data_schema is not None
    # getattr gives the same column attribute as the old eval("data_schema.<field>")
    # without executing a formatted string as code
    time_col = getattr(data_schema, time_field)

    if start_timestamp:
        query = query.filter(time_col >= to_pd_timestamp(start_timestamp))
    if end_timestamp:
        query = query.filter(time_col <= to_pd_timestamp(end_timestamp))

    if filters:
        for f in filters:
            query = query.filter(f)
    if order is not None:
        query = query.order_by(order)
    else:
        # default ordering: ascending by the time column
        query = query.order_by(time_col.asc())
    if limit:
        query = query.limit(limit)
    if distinct:
        query = query.distinct(distinct)

    return query
def get_by_id(data_schema, id: str, provider: str = None, session: Session = None):
    """
    get one record by id from data schema

    :param data_schema: data schema
    :param id: the record id
    :param provider: data provider; defaults to the schema's first registered provider
    :param session: db session; a new one is looked up from (provider, data_schema) if not given
    :return: the record of the id, or None when not found
    """
    if "providers" not in data_schema.__dict__:
        # logging uses printf-style placeholders; the previous "{}" was never interpolated
        logger.error("no provider registered for: %s", data_schema)
    if not provider:
        provider = data_schema.providers[0]

    if not session:
        session = get_db_session(provider=provider, data_schema=data_schema)

    return session.query(data_schema).get(id)
default is df + :param start_timestamp: + :param end_timestamp: + :param filters: + :param session: + :param order: + :param limit: + :param index: index field name, str for single index, str list for multiple index + :param drop_index_col: whether drop the col if it's in index, default False + :param time_field: + :return: results basing on return_type. + """ + if "providers" not in data_schema.__dict__: + logger.error("no provider registered for: {}", data_schema) + if not provider: + provider = data_schema.providers[0] + + if not session: + session = get_db_session(provider=provider, data_schema=data_schema) + + time_col = eval("data_schema.{}".format(time_field)) + + if columns: + # support str + for i, col in enumerate(columns): + if isinstance(col, str): + columns[i] = eval("data_schema.{}".format(col)) + + # make sure get timestamp + if time_col not in columns: + columns.append(time_col) + + if col_label: + columns_ = [] + for col in columns: + if col.name in col_label: + columns_.append(col.label(col_label.get(col.name))) + else: + columns_.append(col) + columns = columns_ + + query = session.query(*columns) + else: + query = session.query(data_schema) + + if entity_id: + query = query.filter(data_schema.entity_id == entity_id) + if entity_ids: + query = query.filter(data_schema.entity_id.in_(entity_ids)) + if code: + query = query.filter(data_schema.code == code) + if codes: + query = query.filter(data_schema.code.in_(codes)) + if ids: + query = query.filter(data_schema.id.in_(ids)) + + # we always store different level in different schema,the level param is not useful now + if level: + try: + #: some schema has no level,just ignore it + data_schema.level + if type(level) == IntervalLevel: + level = level.value + query = query.filter(data_schema.level == level) + except Exception as e: + pass + + query = common_filter( + query, + data_schema=data_schema, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + filters=filters, + order=order, + 
def decode_entity_id(entity_id: str):
    """
    decode entity id to entity_type, exchange, code

    :param entity_id: id of the form ``{entity_type}_{exchange}_{code}``,
        e.g. ``stock_sz_000338``
    :return: tuple with format (entity_type, exchange, code); any extra ``_``
        separators inside the code section are dropped when re-joining
    """
    parts = entity_id.split("_")
    return parts[0], parts[1], "".join(parts[2:])
def df_to_db(
    df: pd.DataFrame,
    data_schema: DeclarativeMeta,
    provider: str,
    force_update: bool = False,
    sub_size: int = 5000,
    drop_duplicates: bool = True,
    dtype=None,
    session=None,
    need_check=True,
) -> object:
    """
    store the df to db

    :param df: data with columns of the schema
    :param data_schema: data schema
    :param provider: data provider
    :param force_update: whether update the data with id existed
    :param sub_size: update batch size
    :param drop_duplicates: whether drop duplicates
    :param dtype: column type mapping passed through to ``DataFrame.to_sql``
    :param session: db session; looked up from (provider, data_schema) if not given
    :param need_check: whether to check/delete existing records before inserting
    :return: count of records saved
    """
    if not pd_is_not_null(df):
        return 0

    if drop_duplicates and df.duplicated(subset="id").any():
        # log the very rows that will be dropped: duplicate check is on "id"
        logger.warning(f"remove duplicated:{df[df.duplicated(subset='id')]}")
        df = df.drop_duplicates(subset="id", keep="last")

    schema_cols = get_schema_columns(data_schema)
    cols = set(df.columns.tolist()) & set(schema_cols)

    if not cols:
        # was a bare print("wrong cols"); use the module logger like the rest of the file
        logger.warning("no columns of %s found in df", data_schema.__tablename__)
        return 0

    df = df[list(cols)]

    size = len(df)

    # sqlite on Windows has a lower variable limit, use smaller batches
    if platform.system() == "Windows":
        sub_size = 900

    # number of batches, rounding up; at least one
    step_size = size // sub_size
    if size % sub_size:
        step_size += 1
    step_size = max(step_size, 1)

    saved = 0

    if not session:
        session = get_db_session(provider=provider, data_schema=data_schema)

    for step in range(step_size):
        df_current = df.iloc[sub_size * step : sub_size * (step + 1)]

        if need_check:
            ids = df_current["id"].tolist()
            if force_update:
                # ORM delete instead of a hand-built f-string DELETE, which was
                # vulnerable to quoting issues in ids and special-cased len(ids)==1
                session.query(data_schema).filter(data_schema.id.in_(ids)).delete(synchronize_session=False)
            else:
                current = get_data(
                    session=session,
                    data_schema=data_schema,
                    columns=[data_schema.id],
                    provider=provider,
                    ids=ids,
                )
                if pd_is_not_null(current):
                    # keep only rows whose id is not already stored
                    df_current = df_current[~df_current["id"].isin(current["id"])]

        if pd_is_not_null(df_current):
            saved = saved + len(df_current)
            df_current.to_sql(
                data_schema.__tablename__, session.connection(), index=False, if_exists="append", dtype=dtype
            )
            session.commit()
    return saved
def get_entity_ids(
    entity_type="stock",
    entity_schema: TradableEntity = None,
    exchanges=None,
    codes=None,
    provider=None,
    filters=None,
    entity_ids=None,
):
    """
    get entity ids by the arguments

    :param entity_type:
    :param entity_schema:
    :param exchanges:
    :param codes:
    :param provider:
    :param filters:
    :param entity_ids:
    :return: list of entity ids, or None when nothing matched
    """
    df = get_entities(
        entity_type=entity_type,
        entity_schema=entity_schema,
        exchanges=exchanges,
        codes=codes,
        provider=provider,
        filters=filters,
        entity_ids=entity_ids,
    )
    # guard clause: empty result -> None, matching the original contract
    if not pd_is_not_null(df):
        return None
    return df["entity_id"].to_list()
class OneStateService(StatefulService):
    """
    StatefulService which saving all states in one object
    """

    def __init__(self) -> None:
        super().__init__()
        # the single state record is keyed by the service name itself
        self.state_domain = self.state_schema.get_by_id(id=self.name)
        self.state: dict = self.decode_state(self.state_domain.state) if self.state_domain else None

    def persist_state(self):
        """Serialize ``self.state`` and write it back, creating the record on first save."""
        if not self.state_domain:
            self.state_domain = self.state_schema(id=self.name, entity_id=self.name, state_name=self.name)
        self.state_domain.state = self.encode_state(self.state)
        self.state_session.add(self.state_domain)
        self.state_session.commit()
class Registry(object):
    """
    Class storing zvt registering meta.

    Presumably populated by schema/provider registration code elsewhere in the
    package and read by the query APIs — confirm against the callers.
    """

    def __init__(self) -> None:
        #: all registered providers
        self.providers = []

        #: all registered entity types(str)
        self.tradable_entity_types = []

        #: all entity schemas
        self.tradable_entity_schemas = []

        #: all registered schemas
        self.schemas = []

        #: tradable entity type -> schema
        self.tradable_schema_map = {}

        #: global sessions, keyed "{provider}_{db_name}"
        self.sessions = {}

        #: provider_dbname -> engine
        self.db_engine_map = {}

        #: provider_dbname -> session factory (sessionmaker instance)
        self.db_session_map = {}

        #: provider -> [db_name1,db_name2...]
        self.provider_map_dbnames = {}

        #: db_name -> [declarative_base1,declarative_base2...]
        self.dbname_map_base = {}

        #: db_name -> [declarative_meta1,declarative_meta2...]
        self.dbname_map_schemas = {}

        #: entity_type -> related schemas
        self.entity_map_schemas = {}

        #: factor class registry
        self.factor_cls_registry = {}


#: :class:`~.zvt.contract.context.Registry` instance shared by the whole process
zvt_context = Registry()
""" + rect struct with left-bottom(x0, y0), right-top(x1, y1) + """ + + def __init__(self, x0=None, y0=None, x1=None, y1=None) -> None: + #: left-bottom x0 + self.x0 = x0 + #: left-bottom y0 + self.y0 = y0 + #: right-top x1 + self.x1 = x1 + #: right-top y1 + self.y1 = y1 + + +class Draw(object): + def draw_kline( + self, width=None, height=None, title=None, keep_ui_state=True, show=False, scale_value=None, **kwargs + ): + return self.draw( + main_chart=ChartType.kline, + width=width, + height=height, + title=title, + keep_ui_state=keep_ui_state, + show=show, + scale_value=scale_value, + **kwargs, + ) + + def draw_line( + self, width=None, height=None, title=None, keep_ui_state=True, show=False, scale_value=None, **kwargs + ): + return self.draw( + main_chart=ChartType.line, + width=width, + height=height, + title=title, + keep_ui_state=keep_ui_state, + show=show, + scale_value=scale_value, + **kwargs, + ) + + def draw_area( + self, width=None, height=None, title=None, keep_ui_state=True, show=False, scale_value=None, **kwargs + ): + return self.draw( + main_chart=ChartType.area, + width=width, + height=height, + title=title, + keep_ui_state=keep_ui_state, + show=show, + scale_value=scale_value, + **kwargs, + ) + + def draw_scatter( + self, width=None, height=None, title=None, keep_ui_state=True, show=False, scale_value=None, **kwargs + ): + return self.draw( + main_chart=ChartType.scatter, + width=width, + height=height, + title=title, + keep_ui_state=keep_ui_state, + show=show, + scale_value=scale_value, + **kwargs, + ) + + def draw_histogram( + self, width=None, height=None, title=None, keep_ui_state=True, show=False, scale_value=None, **kwargs + ): + return self.draw( + ChartType.histogram, + width=width, + height=height, + title=title, + keep_ui_state=keep_ui_state, + show=show, + scale_value=scale_value, + **kwargs, + ) + + def draw_bar(self, width=None, height=None, title=None, keep_ui_state=True, show=False, scale_value=None, **kwargs): + return self.draw( + 
ChartType.bar, + width=width, + height=height, + title=title, + keep_ui_state=keep_ui_state, + show=show, + scale_value=scale_value, + **kwargs, + ) + + def draw_pie(self, width=None, height=None, title=None, keep_ui_state=True, show=False, scale_value=None, **kwargs): + return self.draw( + ChartType.pie, + width=width, + height=height, + title=title, + keep_ui_state=keep_ui_state, + show=show, + scale_value=scale_value, + **kwargs, + ) + + def draw( + self, + main_chart=ChartType.kline, + sub_chart="bar", + width=None, + height=None, + title=None, + keep_ui_state=True, + show=False, + scale_value=None, + **kwargs, + ): + + raise NotImplementedError() + + def default_layout(self, main_chart=None, width=None, height=None, title=None, keep_ui_state=True, **layout_params): + if keep_ui_state: + uirevision = True + else: + uirevision = None + + if main_chart == ChartType.histogram: + xaxis = None + else: + xaxis = dict( + linecolor="#BCCCDC", + showgrid=False, + showspikes=True, # Show spike line for X-axis + # Format spike + spikethickness=2, + spikedash="dot", + spikecolor="#999999", + spikemode="across", + rangeselector=dict( + buttons=list( + [ + dict(count=1, label="1m", step="month", stepmode="backward"), + dict(count=3, label="3m", step="month", stepmode="backward"), + dict(count=6, label="6m", step="month", stepmode="backward"), + dict(count=1, label="YTD", step="year", stepmode="todate"), + dict(count=1, label="1y", step="year", stepmode="backward"), + dict(step="all"), + ] + ) + ), + rangeslider=dict( + visible=True, + ), + type="date", + ) + + return dict( + showlegend=True, + plot_bgcolor="#FFF", + hovermode="x", + hoverdistance=100, # Distance to show hover label of data point + spikedistance=1000, # Distance to show spike + uirevision=uirevision, + height=height, + width=width, + title=title, + yaxis=dict( + autorange=True, + fixedrange=False, + zeroline=False, + linecolor="#BCCCDC", + showgrid=False, + ), + xaxis=xaxis, + legend_orientation="h", + 
class Drawable(object):
    """
    Mixin making an object drawable: subclasses override the ``drawer_*`` hooks
    below to supply data, and :meth:`draw` renders them through a ``Drawer``.
    """

    def drawer(self):
        """Assemble a ``Drawer`` from whatever the hook methods return."""
        drawer = Drawer(
            main_df=self.drawer_main_df(),
            main_data=self.drawer_main_data(),
            factor_df_list=self.drawer_factor_df_list(),
            factor_data_list=self.drawer_factor_data_list(),
            sub_df_list=self.drawer_sub_df_list(),
            sub_data_list=self.drawer_sub_data_list(),
            sub_col_chart=self.drawer_sub_col_chart(),
            annotation_df=self.drawer_annotation_df(),
            rects=self.drawer_rects(),
        )
        return drawer

    def draw(
        self,
        main_chart=ChartType.kline,
        width=None,
        height=None,
        title=None,
        keep_ui_state=True,
        show=False,
        scale_value=None,
        **kwargs,
    ):
        """Build the drawer and delegate rendering to it, forwarding all options."""
        return self.drawer().draw(
            main_chart=main_chart,
            width=width,
            height=height,
            title=title,
            keep_ui_state=keep_ui_state,
            show=show,
            scale_value=scale_value,
            **kwargs,
        )

    # ---- hooks: each returns None by default; override to provide data ----

    def drawer_main_df(self) -> Optional[pd.DataFrame]:
        #: df for the main chart
        return None

    def drawer_main_data(self) -> Optional[NormalData]:
        #: NormalData for the main chart (alternative to drawer_main_df)
        return None

    def drawer_factor_df_list(self) -> Optional[List[pd.DataFrame]]:
        #: factor dfs overlaid on the main chart
        return None

    def drawer_factor_data_list(self) -> Optional[List[NormalData]]:
        #: factor NormalData list (alternative to drawer_factor_df_list)
        return None

    def drawer_sub_df_list(self) -> Optional[List[pd.DataFrame]]:
        #: dfs for the sub chart under the main chart
        return None

    def drawer_sub_data_list(self) -> Optional[List[NormalData]]:
        #: sub chart NormalData list (alternative to drawer_sub_df_list)
        return None

    def drawer_annotation_df(self) -> Optional[pd.DataFrame]:
        #: annotations to draw on the main chart
        return None

    def drawer_rects(self) -> Optional[List[Rect]]:
        #: rectangles to draw on the main chart
        return None

    def drawer_sub_col_chart(self) -> Optional[dict]:
        #: sub-chart column -> chart type mapping
        return None
    def draw(
        self,
        main_chart=ChartType.kline,
        sub_chart="bar",
        width=None,
        height=None,
        title=None,
        keep_ui_state=True,
        show=False,
        scale_value=None,
        **kwargs,
    ):
        """
        Render all stacked drawers into one figure, each on its own y axis.

        :param main_chart: chart type for each drawer's main traces
        :param sub_chart: chart type for sub traces
        :param show: if True, display the figure; otherwise return it
        :return: the assembled ``go.Figure`` when ``show`` is falsy, else None
        """
        stacked_fig = go.Figure()

        total = len(self.drawers)
        start = 1
        domain_range = (0, 1)
        # if any drawer has a sub plot, reserve the bottom 20% of the canvas
        # for it and shift the stacked main plots up by one axis index
        for drawer in self.drawers:
            if drawer.has_sub_plot():
                domain_range = (0.2, 1)
                start = 2
                break
        for index, drawer in enumerate(self.drawers, start=start):
            traces, sub_traces = drawer.make_traces(
                main_chart=main_chart, sub_chart=sub_chart, scale_value=scale_value, **kwargs
            )

            # fix sub traces as the bottom: they always use axis index 1
            if sub_traces:
                yaxis, y, layout = self.make_y_layout(index=1, total=1, domain_range=(0, 0.2))
                # update sub_traces with yaxis
                for trace in sub_traces:
                    trace.yaxis = y
                stacked_fig.add_traces(sub_traces)
                stacked_fig.layout[yaxis] = layout

            # make the y layout for this drawer's slice of the canvas
            yaxis, y, layout = self.make_y_layout(
                index=index, total=total, start_index=start, domain_range=domain_range
            )

            stacked_fig.layout[yaxis] = layout

            # re-point this drawer's traces at its own y axis
            for trace in traces:
                trace.yaxis = y
            stacked_fig.add_traces(traces)

            # rectangles must reference the same y axis as their drawer
            if drawer.rects:
                for rect in drawer.rects:
                    stacked_fig.add_shape(
                        type="rect",
                        x0=rect.x0,
                        y0=rect.y0,
                        x1=rect.x1,
                        y1=rect.y1,
                        line=dict(color="RoyalBlue", width=1),
                        # fillcolor="LightSkyBlue",
                        yref=y,
                    )

            # annotations — `annotations(...)` is a helper defined elsewhere in
            # this module (not visible here); presumably converts the df into
            # plotly annotation dicts — confirm against its definition
            if pd_is_not_null(drawer.annotation_df):
                stacked_fig.layout["annotations"] = annotations(drawer.annotation_df, yref=y)

        stacked_fig.update_layout(
            self.default_layout(
                main_chart=main_chart, width=width, height=height, title=title, keep_ui_state=keep_ui_state
            )
        )

        if show:
            stacked_fig.show()
        else:
            return stacked_fig
    def __init__(
        self,
        main_df: pd.DataFrame = None,
        factor_df_list: List[pd.DataFrame] = None,
        sub_df_list: pd.DataFrame = None,
        main_data: NormalData = None,
        factor_data_list: List[NormalData] = None,
        sub_data_list: NormalData = None,
        sub_col_chart: Optional[dict] = None,
        rects: List[Rect] = None,
        annotation_df: pd.DataFrame = None,
        scale_value: int = None,
    ) -> None:
        """

        :param main_df: df for main chart
        :param factor_df_list: list of factor df on main chart
        :param sub_df_list: df for sub chart under main chart
        :param main_data: NormalData wrap main_df,use either
        :param factor_data_list: list of NormalData wrap factor_df,use either
        :param sub_data_list: NormalData wrap sub_df,use either
        :param sub_col_chart: sub-chart column -> chart type (line or bar)
        :param rects: list of Rect to draw on the main chart
        :param annotation_df: annotations for the main chart
        :param scale_value: common value to scale series to for comparison
        """

        #: main chart data
        if main_data is None:
            main_data = NormalData(main_df)
        self.main_data: NormalData = main_data

        #: factors on the main chart
        if not factor_data_list and factor_df_list:
            factor_data_list = []
            for df in factor_df_list:
                factor_data_list.append(NormalData(df))
        #: each df may hold several columns (several indicators); continuous
        #: indicators can share one df, while discrete ones (e.g. pattern lines)
        #: are better kept in separate dfs since their indexes differ
        self.factor_data_list: List[NormalData] = factor_data_list

        #: sub chart data
        if not sub_data_list and sub_df_list:
            sub_data_list = []
            for df in sub_df_list:
                sub_data_list.append(NormalData(df))
        #: same column/df layout considerations as factor_data_list above
        self.sub_data_list: List[NormalData] = sub_data_list

        #: chart type for each sub-chart column, line or bar
        self.sub_col_chart = sub_col_chart

        #: annotation data for the main chart
        self.annotation_df = annotation_df

        #: list of rect
        self.rects = rects

        self.scale_value = scale_value
add_sub_data(self, data: NormalData): + if not self.sub_data_list: + self.sub_data_list = [] + self.sub_data_list.append(data) + + def has_sub_plot(self): + return self.sub_data_list is not None and not self.sub_data_list[0].empty() + + def make_traces(self, main_chart=ChartType.kline, sub_chart="bar", yaxis="y", scale_value=None, **kwargs): + traces = [] + sub_traces = [] + + for entity_id, df in self.main_data.entity_map_df.items(): + df = df.select_dtypes(np.number) + df = df.copy() + if scale_value: + for col in df.columns: + first = None + for i in range(0, len(df)): + first = df[col][i] + if first != 0: + break + if first == 0: + continue + scale = scale_value / first + df[col] = df[col] * scale + code = entity_id + try: + _, _, code = decode_entity_id(entity_id) + except Exception: + pass + + # 构造主图 + if main_chart == ChartType.bar: + for col in df.columns: + trace_name = "{}_{}".format(code, col) + ydata = df[col].values.tolist() + traces.append(go.Bar(x=df.index, y=ydata, name=trace_name, yaxis=yaxis, **kwargs)) + elif main_chart == ChartType.kline: + trace_name = "{}_kdata".format(code) + trace = go.Candlestick( + x=df.index, + open=df["open"], + close=df["close"], + low=df["low"], + high=df["high"], + name=trace_name, + yaxis=yaxis, + **kwargs, + ) + traces.append(trace) + elif main_chart in [ChartType.scatter, ChartType.line, ChartType.area]: + mode = _zvt_chart_type_map_scatter_mode.get(main_chart) + for col in df.columns: + trace_name = "{}_{}".format(code, col) + ydata = df[col].values.tolist() + traces.append(go.Scatter(x=df.index, y=ydata, mode=mode, name=trace_name, yaxis=yaxis, **kwargs)) + elif main_chart == ChartType.histogram: + for col in df.columns: + trace_name = "{}_{}".format(code, col) + x = df[col].tolist() + trace = go.Histogram(x=x, name=trace_name, **kwargs) + traces.append(trace) + annotation = [ + dict( + entity_id=entity_id, + timestamp=x[-1], + value=0, + flag=f"{trace_name}:{x[-1]}", + ) + ] + annotation_df = 
pd.DataFrame.from_records(annotation, index=["entity_id", "timestamp"]) + if pd_is_not_null(self.annotation_df): + self.annotation_df = pd.concat([self.annotation_df, annotation_df]) + else: + self.annotation_df = annotation_df + elif main_chart == ChartType.pie: + for _, row in df.iterrows(): + traces.append(go.Pie(name=entity_id, labels=df.columns.tolist(), values=row.tolist(), **kwargs)) + else: + assert False + + # 构造主图指标 + if self.factor_data_list: + for factor_data in self.factor_data_list: + if not factor_data.empty(): + factor_df = factor_data.entity_map_df.get(entity_id) + factor_df = factor_df.select_dtypes(np.number) + if pd_is_not_null(factor_df): + for col in factor_df.columns: + trace_name = "{}_{}".format(code, col) + ydata = factor_df[col].values.tolist() + + line = go.Scatter( + x=factor_df.index, y=ydata, mode="lines", name=trace_name, yaxis=yaxis, **kwargs + ) + traces.append(line) + + # 构造幅图 + if self.has_sub_plot(): + for sub_data in self.sub_data_list: + sub_df = sub_data.entity_map_df.get(entity_id) + if pd_is_not_null(sub_df): + sub_df = sub_df.select_dtypes(np.number) + for col in sub_df.columns: + trace_name = "{}_{}".format(code, col) + ydata = sub_df[col].values.tolist() + + def color(i): + if i > 0: + return "red" + else: + return "green" + + colors = [color(i) for i in ydata] + + the_sub_chart = None + if self.sub_col_chart is not None: + the_sub_chart = self.sub_col_chart.get(col) + if not the_sub_chart: + the_sub_chart = sub_chart + + if the_sub_chart == ChartType.line: + sub_trace = go.Scatter( + x=sub_df.index, y=ydata, name=trace_name, yaxis="y2", marker=dict(color=colors) + ) + else: + sub_trace = go.Bar( + x=sub_df.index, y=ydata, name=trace_name, yaxis="y2", marker=dict(color=colors) + ) + sub_traces.append(sub_trace) + + return traces, sub_traces + + def add_rects(self, fig, yaxis="y"): + if self.rects: + for rect in self.rects: + fig.add_shape( + type="rect", + x0=rect.x0, + y0=rect.y0, + x1=rect.x1, + y1=rect.y1, + 
line=dict(color="RoyalBlue", width=1), + # fillcolor="LightSkyBlue" + ) + fig.update_shapes(dict(xref="x", yref=yaxis)) + + def draw( + self, + main_chart=ChartType.kline, + sub_chart="bar", + width=None, + height=None, + title=None, + keep_ui_state=True, + show=False, + scale_value=None, + **kwargs, + ): + yaxis = "y" + traces, sub_traces = self.make_traces( + main_chart=main_chart, sub_chart=sub_chart, yaxis=yaxis, scale_value=scale_value, **kwargs + ) + + if sub_traces: + fig = make_subplots(rows=2, cols=1, row_heights=[0.8, 0.2], vertical_spacing=0.08, shared_xaxes=True) + fig.add_traces(traces, rows=[1] * len(traces), cols=[1] * len(traces)) + fig.add_traces(sub_traces, rows=[2] * len(sub_traces), cols=[1] * len(sub_traces)) + else: + fig = go.Figure() + fig.add_traces(traces) + + # 绘制矩形 + self.add_rects(fig, yaxis=yaxis) + + fig.update_layout( + self.default_layout( + main_chart=main_chart, width=width, height=height, title=title, keep_ui_state=keep_ui_state + ) + ) + + if sub_traces: + fig.update_layout(xaxis_rangeslider_visible=False) + fig.update_layout(xaxis2_rangeslider_visible=True, xaxis2_rangeslider_thickness=0.1) + # 绘制标志 + if pd_is_not_null(self.annotation_df): + fig.layout["annotations"] = annotations(self.annotation_df, yref=yaxis) + + if show: + fig.show() + else: + return fig + + def draw_table(self, width=None, height=None, title=None, keep_ui_state=True, **kwargs): + cols = self.main_data.data_df.index.names + self.main_data.data_df.columns.tolist() + + index1 = self.main_data.data_df.index.get_level_values(0).tolist() + index2 = self.main_data.data_df.index.get_level_values(1).tolist() + values = [index1] + [index2] + [self.main_data.data_df[col] for col in self.main_data.data_df.columns] + + data = go.Table( + header=dict( + values=cols, + fill_color=["#000080", "#000080"] + ["#0066cc"] * len(self.main_data.data_df.columns), + align="left", + font=dict(color="white", size=13), + ), + cells=dict(values=values, fill=dict(color="#F5F8FF"), 
align="left"), + **kwargs, + ) + + fig = go.Figure() + fig.add_traces([data]) + fig.update_layout(self.default_layout(width=width, height=height, title=title, keep_ui_state=keep_ui_state)) + + fig.show() + + +def annotations(annotation_df: pd.DataFrame, yref="y"): + """ + annotation_df format:: + + value flag color + entity_id timestamp + + :param annotation_df: + :param yref: specific yaxis e.g, y,y2,y3 + :return: + """ + + if pd_is_not_null(annotation_df): + annotations = [] + for trace_name, df in annotation_df.groupby(level=0): + if pd_is_not_null(df): + for (_, timestamp), item in df.iterrows(): + if "color" in item: + color = item["color"] + else: + color = "#ec0000" + + value = round(item["value"], 2) + annotations.append( + dict( + x=timestamp, + y=value, + xref="x", + yref=yref, + text=item["flag"], + showarrow=True, + align="center", + arrowhead=2, + arrowsize=1, + arrowwidth=2, + # arrowcolor='#030813', + ax=-10, + ay=-30, + bordercolor="#c7c7c7", + borderwidth=1, + bgcolor=color, + opacity=0.8, + ) + ) + return annotations + return None + + +# the __all__ is generated +__all__ = ["ChartType", "Rect", "Draw", "Drawable", "StackedDrawer", "Drawer", "annotations"] diff --git a/src/zvt/contract/factor.py b/src/zvt/contract/factor.py new file mode 100644 index 00000000..00ade751 --- /dev/null +++ b/src/zvt/contract/factor.py @@ -0,0 +1,675 @@ +# -*- coding: utf-8 -*- +import json +import logging +import time +from enum import Enum +from typing import List, Union, Optional, Type + +import pandas as pd + +from zvt.contract import IntervalLevel +from zvt.contract import zvt_context +from zvt.contract.api import get_data, df_to_db, del_data +from zvt.contract.base_service import EntityStateService +from zvt.contract.reader import DataReader, DataListener +from zvt.contract.schema import Mixin, TradableEntity +from zvt.contract.zvt_info import FactorState +from zvt.utils.pd_utils import pd_is_not_null, drop_continue_duplicate, is_filter_result_df, 
is_score_result_df +from zvt.utils.str_utils import to_snake_str +from zvt.utils.time_utils import to_pd_timestamp + + +class TargetType(Enum): + positive = "positive" + negative = "negative" + keep = "keep" + + +class Indicator(object): + def __init__(self) -> None: + self.logger = logging.getLogger(self.__class__.__name__) + self.indicators = [] + + +class Transformer(Indicator): + def __init__(self) -> None: + super().__init__() + + def transform(self, input_df: pd.DataFrame) -> pd.DataFrame: + """ + input_df format:: + + col1 col2 col3 ... + entity_id timestamp + 1.2 0.5 0.3 ... + 1.0 0.7 0.2 ... + + the return result would change the columns and keep the format + + :param input_df: + :return: + """ + g = input_df.groupby(level=0) + if len(g.groups) == 1: + entity_id = input_df.index[0][0] + + df = input_df.reset_index(level=0, drop=True) + ret_df = self.transform_one(entity_id=entity_id, df=df) + ret_df["entity_id"] = entity_id + + return ret_df.set_index("entity_id", append=True).swaplevel(0, 1) + else: + return g.apply(lambda x: self.transform_one(x.index[0][0], x.reset_index(level=0, drop=True))) + + def transform_one(self, entity_id: str, df: pd.DataFrame) -> pd.DataFrame: + """ + df format:: + + col1 col2 col3 ... + timestamp + 1.2 0.5 0.3 ... + 1.0 0.7 0.2 ... 
+ + the return result would change the columns and keep the format + + :param entity_id: + :param df: + :return: + """ + return df + + +class Accumulator(Indicator): + def __init__(self, acc_window: int = 1) -> None: + """ + + :param acc_window: the window size of acc for computing,default is 1 + """ + super().__init__() + self.acc_window = acc_window + + def acc(self, input_df: pd.DataFrame, acc_df: pd.DataFrame, states: dict) -> (pd.DataFrame, dict): + """ + + :param input_df: new input + :param acc_df: previous result + :param states: current states of the entity + :return: new result and states + """ + g = input_df.groupby(level=0) + if len(g.groups) == 1: + entity_id = input_df.index[0][0] + + df = input_df.reset_index(level=0, drop=True) + if pd_is_not_null(acc_df) and (entity_id == acc_df.index[0][0]): + acc_one_df = acc_df.reset_index(level=0, drop=True) + else: + acc_one_df = None + ret_df, state = self.acc_one(entity_id=entity_id, df=df, acc_df=acc_one_df, state=states.get(entity_id)) + if pd_is_not_null(ret_df): + ret_df["entity_id"] = entity_id + ret_df = ret_df.set_index("entity_id", append=True).swaplevel(0, 1) + ret_df["entity_id"] = entity_id + return ret_df, {entity_id: state} + return None, {entity_id: state} + else: + new_states = {} + + def cal_acc(x): + entity_id = x.index[0][0] + if pd_is_not_null(acc_df): + acc_g = acc_df.groupby(level=0) + acc_one_df = None + if entity_id in acc_g.groups: + acc_one_df = acc_g.get_group(entity_id) + if pd_is_not_null(acc_one_df): + acc_one_df = acc_one_df.reset_index(level=0, drop=True) + else: + acc_one_df = None + + one_result, state = self.acc_one( + entity_id=entity_id, + df=x.reset_index(level=0, drop=True), + acc_df=acc_one_df, + state=states.get(x.index[0][0]), + ) + + new_states[entity_id] = state + return one_result + + ret_df = g.apply(lambda x: cal_acc(x)) + return ret_df, new_states + + def acc_one(self, entity_id, df: pd.DataFrame, acc_df: pd.DataFrame, state: dict) -> (pd.DataFrame, dict): + """ 
+ df format:: + + col1 col2 col3 ... + timestamp + 1.2 0.5 0.3 ... + 1.0 0.7 0.2 ... + + the new result and state + + :param df: current input df + :param entity_id: current computing entity_id + :param acc_df: current result of the entity_id + :param state: current state of the entity_id + :return: new result and state of the entity_id + """ + return acc_df, state + + +class Scorer(object): + def __init__(self) -> None: + self.logger = logging.getLogger(self.__class__.__name__) + + def score(self, input_df: pd.DataFrame) -> pd.DataFrame: + """ + + :param input_df: current input df + :return: df with normal score + """ + return input_df + + +def _register_class(target_class): + if target_class.__name__ not in ("Factor", "FilterFactor", "ScoreFactor", "StateFactor"): + zvt_context.factor_cls_registry[target_class.__name__] = target_class + + +class FactorMeta(type): + def __new__(meta, name, bases, class_dict): + cls = type.__new__(meta, name, bases, class_dict) + _register_class(cls) + return cls + + +class Factor(DataReader, EntityStateService, DataListener): + #: Schema for storing states + state_schema = FactorState + #: define the schema for persist,its columns should be same as indicators in transformer or accumulator + factor_schema: Type[Mixin] = None + + #: transformer for this factor if not passed as __init__ argument + transformer: Transformer = None + #: accumulator for this factor if not passed as __init__ argument + accumulator: Accumulator = None + + def __init__( + self, + data_schema: Type[Mixin], + entity_schema: Type[TradableEntity] = None, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + columns: List = None, + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + 
category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: int = None, + transformer: Transformer = None, + accumulator: Accumulator = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = False, + ) -> None: + """ + :param keep_all_timestamp: + :param fill_method: + :param effective_number: + :param transformer: + :param accumulator: + :param need_persist: whether persist factor + :param only_compute_factor: only compute factor nor result + :param factor_name: + :param clear_state: + :param only_load_factor: only load factor and compute result + """ + self.only_load_factor = only_load_factor + + #: define unique name of your factor if you want to keep factor state + #: the factor state is defined by factor_name and entity_id + if not factor_name: + self.name = to_snake_str(type(self).__name__) + else: + self.name = factor_name + + DataReader.__init__( + self, + data_schema, + entity_schema, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + ) + + EntityStateService.__init__(self, entity_ids=entity_ids) + + self.clear_state = clear_state + + self.keep_all_timestamp = keep_all_timestamp + self.fill_method = fill_method + self.effective_number = effective_number + + if transformer: + self.transformer = transformer + else: + self.transformer = self.__class__.transformer + + if accumulator: + self.accumulator = accumulator + else: + self.accumulator = self.__class__.accumulator + + self.need_persist = need_persist + self.only_compute_factor = only_compute_factor + + #: 中间结果,不持久化 + #: data_df->pipe_df + self.pipe_df: pd.DataFrame = None + + #: 计算因子的结果,可持久化,通过对pipe_df的计算得到 + #: pipe_df->factor_df + 
self.factor_df: pd.DataFrame = None + + #: result_df是用于选股的标准df,通过对factor_df的计算得到 + #: factor_df->result_df + self.result_df: pd.DataFrame = None + + if self.clear_state: + self.clear_state_data() + elif self.need_persist or self.only_load_factor: + self.load_factor() + + #: 根据已经计算的factor_df和computing_window来保留data_df + #: 因为读取data_df的目的是为了计算factor_df,选股和回测只依赖factor_df + #: 所以如果有持久化的factor_df,只需保留需要用于计算的data_df即可 + if pd_is_not_null(self.data_df) and self.computing_window: + dfs = [] + for entity_id, df in self.data_df.groupby(level=0): + latest_laved = get_data( + provider="zvt", + data_schema=self.factor_schema, + entity_id=entity_id, + order=self.factor_schema.timestamp.desc(), + limit=1, + index=[self.category_field, self.time_field], + return_type="domain", + ) + if latest_laved: + df1 = df[df.timestamp < latest_laved[0].timestamp].iloc[-self.computing_window :] + if pd_is_not_null(df1): + df = df[df.timestamp >= df1.iloc[0].timestamp] + dfs.append(df) + + self.data_df = pd.concat(dfs) + + self.register_data_listener(self) + + #: the compute logic is not triggered from load data + #: for the case:1)load factor from db 2)compute the result + if self.only_load_factor: + self.compute() + + def load_data(self): + if self.only_load_factor: + return + super().load_data() + + def load_factor(self): + if self.only_compute_factor: + #: 如果只是为了计算因子,只需要读取acc_window的factor_df + if self.accumulator is not None: + self.factor_df = self.load_window_df( + provider="zvt", data_schema=self.factor_schema, window=self.accumulator.acc_window + ) + else: + self.factor_df = get_data( + provider="zvt", + data_schema=self.factor_schema, + start_timestamp=self.start_timestamp, + entity_ids=self.entity_ids, + end_timestamp=self.end_timestamp, + index=[self.category_field, self.time_field], + ) + + self.decode_factor_df(self.factor_df) + + def decode_factor_df(self, df): + col_map_object_hook = self.factor_col_map_object_hook() + if pd_is_not_null(df) and col_map_object_hook: + for col in 
col_map_object_hook: + if col in df.columns: + df[col] = df[col].apply( + lambda x: json.loads(x, object_hook=col_map_object_hook.get(col)) if x else None + ) + + def factor_col_map_object_hook(self) -> dict: + """ + + :return:{col:object_hook} + """ + return {} + + def clear_state_data(self, entity_id=None): + super().clear_state_data(entity_id=entity_id) + if entity_id: + del_data(self.factor_schema, filters=[self.factor_schema.entity_id == entity_id], provider="zvt") + else: + del_data(self.factor_schema, provider="zvt") + + def pre_compute(self): + if not self.only_load_factor and not pd_is_not_null(self.pipe_df): + self.pipe_df = self.data_df + + def do_compute(self): + self.logger.info("compute factor start") + self.compute_factor() + self.logger.info("compute factor finish") + + self.logger.info("compute result start") + self.compute_result() + self.logger.info("compute result finish") + + def compute_factor(self): + if self.only_load_factor: + return + #: 无状态的转换运算 + if pd_is_not_null(self.data_df) and self.transformer: + self.pipe_df = self.transformer.transform(self.data_df) + else: + self.pipe_df = self.data_df + + #: 有状态的累加运算 + if pd_is_not_null(self.pipe_df) and self.accumulator: + self.factor_df, self.states = self.accumulator.acc(self.pipe_df, self.factor_df, self.states) + else: + self.factor_df = self.pipe_df + + def compute_result(self): + if pd_is_not_null(self.factor_df): + cols = [] + if is_filter_result_df(self.factor_df): + cols.append("filter_result") + if is_score_result_df(self.factor_df): + cols.append("score_result") + + if cols: + self.result_df = self.factor_df[cols] + + def after_compute(self): + if self.only_load_factor: + return + if self.keep_all_timestamp: + self.fill_gap() + + if self.need_persist and pd_is_not_null(self.factor_df): + self.persist_factor() + + def compute(self): + self.pre_compute() + + self.logger.info(f"[[[ ~~~~~~~~factor:{self.name} ~~~~~~~~]]]") + self.logger.info("do_compute start") + start_time = time.time() 
+ self.do_compute() + cost_time = time.time() - start_time + self.logger.info("do_compute finished,cost_time:{}s".format(cost_time)) + + self.logger.info("after_compute start") + start_time = time.time() + self.after_compute() + cost_time = time.time() - start_time + self.logger.info("after_compute finished,cost_time:{}s".format(cost_time)) + self.logger.info(f"[[[ ^^^^^^^^factor:{self.name} ^^^^^^^^]]]") + + def drawer_main_df(self) -> Optional[pd.DataFrame]: + if self.only_load_factor: + return self.factor_df + return self.data_df + + def drawer_factor_df_list(self) -> Optional[List[pd.DataFrame]]: + if (self.transformer is not None or self.accumulator is not None) and pd_is_not_null(self.factor_df): + indicators = None + if self.transformer is not None: + indicators = self.transformer.indicators + elif self.accumulator is not None: + indicators = self.accumulator.indicators + + if indicators: + return [self.factor_df[indicators]] + else: + return [self.factor_df] + return None + + def drawer_sub_df_list(self) -> Optional[List[pd.DataFrame]]: + if (self.transformer is not None or self.accumulator is not None) and pd_is_not_null(self.result_df): + return [self.result_df] + return None + + def drawer_annotation_df(self) -> Optional[pd.DataFrame]: + def order_type_flag(order_type): + if order_type is None: + return None + if order_type: + return "B" + if not order_type: + return "S" + + def order_type_color(order_type): + if order_type: + return "#ec0000" + else: + return "#00da3c" + + if is_filter_result_df(self.result_df): + annotation_df = self.result_df[["filter_result"]].copy() + annotation_df = annotation_df[~annotation_df["filter_result"].isna()] + annotation_df = drop_continue_duplicate(annotation_df, "filter_result") + annotation_df["value"] = self.factor_df.loc[annotation_df.index]["close"] + annotation_df["flag"] = annotation_df["filter_result"].apply(lambda x: order_type_flag(x)) + annotation_df["color"] = annotation_df["filter_result"].apply(lambda x: 
order_type_color(x)) + return annotation_df + + def fill_gap(self): + #: 该操作较慢,只适合做基本面的运算 + idx = pd.date_range(self.start_timestamp, self.end_timestamp) + new_index = pd.MultiIndex.from_product( + [self.result_df.index.levels[0], idx], names=[self.category_field, self.time_field] + ) + self.result_df = self.result_df.loc[~self.result_df.index.duplicated(keep="first")] + self.result_df = self.result_df.reindex(new_index) + self.result_df = self.result_df.groupby(level=0).fillna(method=self.fill_method, limit=self.effective_number) + + def add_entities(self, entity_ids): + if (self.entity_ids and entity_ids) and (set(self.entity_ids) == set(entity_ids)): + self.logger.info(f"current: {self.entity_ids}") + self.logger.info(f"refresh: {entity_ids}") + return + new_entity_ids = None + if entity_ids: + new_entity_ids = list(set(entity_ids) - set(self.entity_ids)) + self.entity_ids = list(set(self.entity_ids + entity_ids)) + + if new_entity_ids: + self.logger.info(f"added new entity: {new_entity_ids}") + if not self.only_load_factor: + new_data_df = self.data_schema.query_data( + entity_ids=new_entity_ids, + provider=self.provider, + columns=self.columns, + start_timestamp=self.start_timestamp, + end_timestamp=self.end_timestamp, + filters=self.filters, + order=self.order, + limit=self.limit, + level=self.level, + index=[self.category_field, self.time_field], + time_field=self.time_field, + ) + self.data_df = pd.concat([self.data_df, new_data_df], sort=False) + self.data_df.sort_index(level=[0, 1], inplace=True) + + new_factor_df = get_data( + provider="zvt", + data_schema=self.factor_schema, + start_timestamp=self.start_timestamp, + entity_ids=new_entity_ids, + end_timestamp=self.end_timestamp, + index=[self.category_field, self.time_field], + ) + self.decode_factor_df(new_factor_df) + + self.factor_df = pd.concat([self.factor_df, new_factor_df], sort=False) + self.factor_df.sort_index(level=[0, 1], inplace=True) + + def on_data_loaded(self, data: pd.DataFrame): + 
self.compute() + + def on_data_changed(self, data: pd.DataFrame): + """ + overwrite it for computing after data added + + :param data: + """ + self.compute() + + def on_entity_data_changed(self, entity, added_data: pd.DataFrame): + """ + overwrite it for computing after entity data added + + :param entity: + :param added_data: + """ + pass + + def persist_factor(self): + df = self.factor_df.copy() + #: encode json columns + if pd_is_not_null(df) and self.factor_col_map_object_hook(): + for col in self.factor_col_map_object_hook(): + if col in df.columns: + df[col] = df[col].apply(lambda x: json.dumps(x, cls=self.state_encoder())) + + if self.states: + g = df.groupby(level=0) + for entity_id in self.states: + state = self.states[entity_id] + try: + if state: + self.persist_state(entity_id=entity_id) + if entity_id in g.groups: + df_to_db( + df=df.loc[(entity_id,)], data_schema=self.factor_schema, provider="zvt", force_update=False + ) + except Exception as e: + self.logger.error(f"{self.name} {entity_id} save state error") + self.logger.exception(e) + #: clear them if error happen + self.clear_state_data(entity_id) + else: + df_to_db(df=df, data_schema=self.factor_schema, provider="zvt", force_update=False) + + def get_filter_df(self): + if is_filter_result_df(self.result_df): + return self.result_df[["filter_result"]] + + def get_score_df(self): + if is_score_result_df(self.result_df): + return self.result_df[["score_result"]] + + def get_trading_signal_df(self): + df = self.result_df[["filter_result"]].copy() + df = df[~df["filter_result"].isna()] + df = drop_continue_duplicate(df, "filter_result") + return df + + def get_targets( + self, + timestamp=None, + start_timestamp=None, + end_timestamp=None, + target_type: TargetType = TargetType.positive, + positive_threshold=0.8, + negative_threshold=-0.8, + ): + if timestamp and (start_timestamp or end_timestamp): + raise ValueError("Use timestamp or (start_timestamp, end_timestamp)") + # select by filter + filter_df 
= self.get_filter_df() + selected_df = None + target_df = None + if pd_is_not_null(filter_df): + if target_type == TargetType.positive: + selected_df = filter_df[filter_df["filter_result"] == True] + elif target_type == TargetType.negative: + selected_df = filter_df[filter_df["filter_result"] == False] + else: + selected_df = filter_df[filter_df["filter_result"].isna()] + + # select by score + score_df = self.get_score_df() + if pd_is_not_null(score_df): + if pd_is_not_null(selected_df): + # filter at first + score_df = score_df.loc[selected_df.index, :] + if target_type == TargetType.positive: + selected_df = score_df[score_df["score_result"] >= positive_threshold] + elif target_type == TargetType.negative: + selected_df = score_df[score_df["score_result"] <= negative_threshold] + else: + selected_df = score_df[ + (score_df["score_result"] > negative_threshold) & (score_df["score"] < positive_threshold) + ] + print(selected_df) + if pd_is_not_null(selected_df): + selected_df = selected_df.reset_index(level="entity_id") + if timestamp: + if to_pd_timestamp(timestamp) in selected_df.index: + target_df = selected_df.loc[[to_pd_timestamp(timestamp)], ["entity_id"]] + else: + target_df = selected_df.loc[ + slice(to_pd_timestamp(start_timestamp), to_pd_timestamp(end_timestamp)), ["entity_id"] + ] + + if pd_is_not_null(target_df): + return target_df["entity_id"].tolist() + return [] + + +class ScoreFactor(Factor): + scorer: Scorer = None + + def compute_result(self): + super().compute_result() + if pd_is_not_null(self.factor_df) and self.scorer: + self.result_df = self.scorer.score(self.factor_df) + + +# the __all__ is generated +__all__ = ["TargetType", "Indicator", "Transformer", "Accumulator", "Scorer", "FactorMeta", "Factor", "ScoreFactor"] diff --git a/src/zvt/contract/model.py b/src/zvt/contract/model.py new file mode 100644 index 00000000..39a3b0eb --- /dev/null +++ b/src/zvt/contract/model.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +from datetime import 
datetime + +from pydantic import BaseModel, ConfigDict + + +class CustomModel(BaseModel): + model_config = ConfigDict(from_attributes=True, allow_inf_nan=True) + + +class MixinModel(CustomModel): + id: str + entity_id: str + timestamp: datetime + + +# the __all__ is generated +__all__ = ["CustomModel", "MixinModel"] diff --git a/zvt/contract/normal_data.py b/src/zvt/contract/normal_data.py similarity index 77% rename from zvt/contract/normal_data.py rename to src/zvt/contract/normal_data.py index a6885656..c1e58755 100644 --- a/zvt/contract/normal_data.py +++ b/src/zvt/contract/normal_data.py @@ -6,11 +6,7 @@ class NormalData(object): table_type_sample = None - def __init__(self, - df, - category_field='entity_id', - time_field='timestamp', - fill_index: bool = False) -> None: + def __init__(self, df, category_field="entity_id", time_field="timestamp", fill_index: bool = False) -> None: self.data_df = df self.category_field = category_field self.time_field = time_field @@ -24,9 +20,10 @@ def __init__(self, def normalize(self): """ - normalize data_df to - col1 col2 col3 - entity_id timestamp + normalize data_df to:: + + col1 col2 col3 + entity_id timestamp """ if pd_is_not_null(self.data_df): @@ -47,4 +44,5 @@ def empty(self): return not pd_is_not_null(self.data_df) -__all__ = ['NormalData'] +# the __all__ is generated +__all__ = ["NormalData"] diff --git a/src/zvt/contract/reader.py b/src/zvt/contract/reader.py new file mode 100644 index 00000000..b0990b3e --- /dev/null +++ b/src/zvt/contract/reader.py @@ -0,0 +1,289 @@ +# -*- coding: utf-8 -*- +import logging +import time +from typing import List, Union, Type, Optional + +import pandas as pd + +from zvt.contract import IntervalLevel +from zvt.contract.api import get_entities +from zvt.contract.drawer import Drawable +from zvt.contract.schema import Mixin, TradableEntity +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import to_pd_timestamp, now_pd_timestamp + + +class 
DataListener(object): + def on_data_loaded(self, data: pd.DataFrame) -> object: + """ + + :param data: + """ + raise NotImplementedError + + def on_data_changed(self, data: pd.DataFrame) -> object: + """ + + :param data: + """ + raise NotImplementedError + + def on_entity_data_changed(self, entity: str, added_data: pd.DataFrame) -> object: + """ + + :param entity: the entity + :param added_data: the data added for the entity + """ + pass + + +class DataReader(Drawable): + logger = logging.getLogger(__name__) + + def __init__( + self, + data_schema: Type[Mixin], + entity_schema: Type[TradableEntity] = None, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = now_pd_timestamp(), + columns: List = None, + filters: List = None, + order: object = None, + limit: int = None, + level: IntervalLevel = None, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + ) -> None: + self.logger = logging.getLogger(self.__class__.__name__) + + self.data_schema = data_schema + self.entity_schema = entity_schema + self.provider = provider + self.entity_provider = entity_provider + self.start_timestamp = start_timestamp + self.end_timestamp = end_timestamp + self.start_timestamp = to_pd_timestamp(self.start_timestamp) + self.end_timestamp = to_pd_timestamp(self.end_timestamp) + self.exchanges = exchanges + self.codes = codes + self.entity_ids = entity_ids + + # 转换成标准entity_id + if entity_schema and not self.entity_ids: + df = get_entities( + entity_schema=entity_schema, provider=self.entity_provider, exchanges=self.exchanges, codes=self.codes + ) + if pd_is_not_null(df): + self.entity_ids = df["entity_id"].to_list() + + self.filters = filters + self.order = order + self.limit = limit + + if level: + self.level = IntervalLevel(level) + else: + self.level = 
level + + self.category_field = category_field + self.time_field = time_field + self.computing_window = keep_window + + self.category_col = eval("self.data_schema.{}".format(self.category_field)) + self.time_col = eval("self.data_schema.{}".format(self.time_field)) + + self.columns = columns + + if self.columns: + # always add category_column and time_field for normalizing + self.columns = list(set(self.columns) | {self.category_field, self.time_field}) + + self.data_listeners: List[DataListener] = [] + + self.data_df: pd.DataFrame = None + + self.load_data() + + def load_window_df(self, provider, data_schema, window): + window_df = None + + dfs = [] + for entity_id in self.entity_ids: + df = data_schema.query_data( + provider=provider, + index=[self.category_field, self.time_field], + order=data_schema.timestamp.desc(), + entity_id=entity_id, + limit=window, + ) + if pd_is_not_null(df): + dfs.append(df) + if dfs: + window_df = pd.concat(dfs) + window_df = window_df.sort_index(level=[0, 1]) + return window_df + + def load_data(self): + self.logger.info("load_data start") + start_time = time.time() + params = dict( + entity_size=len(self.entity_ids) if self.entity_ids != None else None, + provider=self.provider, + columns=self.columns, + start_timestamp=self.start_timestamp, + end_timestamp=self.end_timestamp, + filters=self.filters, + order=self.order, + limit=self.limit, + level=self.level, + index=[self.category_field, self.time_field], + time_field=self.time_field, + ) + self.logger.info(f"query_data params:{params}") + + self.data_df = self.data_schema.query_data( + entity_ids=self.entity_ids, + provider=self.provider, + columns=self.columns, + start_timestamp=self.start_timestamp, + end_timestamp=self.end_timestamp, + filters=self.filters, + order=self.order, + limit=self.limit, + level=self.level, + index=[self.category_field, self.time_field], + time_field=self.time_field, + ) + + cost_time = time.time() - start_time + self.logger.info("load_data finished, 
cost_time:{}".format(cost_time)) + + for listener in self.data_listeners: + listener.on_data_loaded(self.data_df) + + def move_on(self, to_timestamp: Union[str, pd.Timestamp] = None, timeout: int = 20) -> object: + """ + using continual fetching data in realtime + 1)get the data happened before to_timestamp,if not set,get all the data which means to now + 2)if computing_window set,the data_df would be cut for saving memory + + + :param to_timestamp: + :type to_timestamp: + :param timeout: + :type timeout: int + :return: + :rtype: + """ + + if not pd_is_not_null(self.data_df): + self.load_data() + return + + start_time = time.time() + + has_got = [] + dfs = [] + changed = False + while True: + for entity_id, df in self.data_df.groupby(level=0): + if entity_id in has_got: + continue + + recorded_timestamp = df.index.levels[1].max() + + #: move_on读取数据,表明之前的数据已经处理完毕,只需要保留computing_window的数据 + if self.computing_window: + df = df.iloc[-self.computing_window :] + + added_filter = [self.category_col == entity_id, self.time_col > recorded_timestamp] + if self.filters: + filters = self.filters + added_filter + else: + filters = added_filter + + added_df = self.data_schema.query_data( + provider=self.provider, + columns=self.columns, + end_timestamp=to_timestamp, + filters=filters, + level=self.level, + index=[self.category_field, self.time_field], + ) + + if pd_is_not_null(added_df): + self.logger.info(f'got new data:{df.to_json(orient="records", force_ascii=False)}') + + for listener in self.data_listeners: + listener.on_entity_data_changed(entity=entity_id, added_data=added_df) + #: if got data,just move to another entity_id + changed = True + has_got.append(entity_id) + # df = df.append(added_df, sort=False) + df = pd.concat([df, added_df], sort=False) + dfs.append(df) + else: + cost_time = time.time() - start_time + if cost_time > timeout: + #: if timeout,just add the old data + has_got.append(entity_id) + dfs.append(df) + self.logger.warning( + "category:{} level:{} 
getting data timeout,to_timestamp:{},now:{}".format( + entity_id, self.level, to_timestamp, now_pd_timestamp() + ) + ) + continue + + if len(has_got) == len(self.data_df.index.levels[0]): + break + + if dfs: + self.data_df = pd.concat(dfs, sort=False) + self.data_df.sort_index(level=[0, 1], inplace=True) + + if changed: + for listener in self.data_listeners: + listener.on_data_changed(self.data_df) + + def register_data_listener(self, listener): + if listener not in self.data_listeners: + self.data_listeners.append(listener) + + #: notify it once after registered + if pd_is_not_null(self.data_df): + listener.on_data_loaded(self.data_df) + + def deregister_data_listener(self, listener): + if listener in self.data_listeners: + self.data_listeners.remove(listener) + + def empty(self): + return not pd_is_not_null(self.data_df) + + def drawer_main_df(self) -> Optional[pd.DataFrame]: + return self.data_df + + +if __name__ == "__main__": + from zvt.domain import Stock1dKdata, Stock + + data_reader = DataReader( + data_schema=Stock1dKdata, + entity_schema=Stock, + codes=["002572", "000338"], + start_timestamp="2017-01-01", + end_timestamp="2019-06-10", + ) + + data_reader.draw(show=True) + + +# the __all__ is generated +__all__ = ["DataListener", "DataReader"] diff --git a/zvt/contract/recorder.py b/src/zvt/contract/recorder.py similarity index 52% rename from zvt/contract/recorder.py rename to src/zvt/contract/recorder.py index 1e521051..2d15c577 100644 --- a/zvt/contract/recorder.py +++ b/src/zvt/contract/recorder.py @@ -5,14 +5,24 @@ from typing import List import pandas as pd +import requests from sqlalchemy.orm import Session -from zvt.contract import IntervalLevel, Mixin, EntityMixin +from zvt.contract import IntervalLevel from zvt.contract.api import get_db_session, get_schema_columns from zvt.contract.api import get_entities, get_data -from zvt.utils import pd_is_not_null -from zvt.utils.time_utils import to_pd_timestamp, TIME_FORMAT_DAY, to_time_str, \ - 
evaluate_size_from_timestamp, is_in_same_interval, now_pd_timestamp, now_time_str +from zvt.contract.base_service import OneStateService +from zvt.contract.schema import Mixin, TradableEntity +from zvt.contract.utils import is_in_same_interval, evaluate_size_from_timestamp +from zvt.contract.zvt_info import RecorderState +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import ( + to_pd_timestamp, + TIME_FORMAT_DAY, + to_time_str, + now_pd_timestamp, + now_time_str, +) from zvt.utils.utils import fill_domain_from_dict @@ -20,91 +30,111 @@ class Meta(type): def __new__(meta, name, bases, class_dict): cls = type.__new__(meta, name, bases, class_dict) # register the recorder class to the data_schema - if hasattr(cls, 'data_schema') and hasattr(cls, 'provider'): + if hasattr(cls, "data_schema") and hasattr(cls, "provider"): if cls.data_schema and issubclass(cls.data_schema, Mixin): - print(f'{cls.__name__}:{cls.data_schema.__name__}') + print(f"{cls.__name__}:{cls.data_schema.__name__}") cls.data_schema.register_recorder_cls(cls.provider, cls) return cls -class Recorder(metaclass=Meta): - logger = logging.getLogger(__name__) - - # overwrite them to setup the data you want to record +class Recorder(OneStateService, metaclass=Meta): + #: overwrite them to set up the data you want to record provider: str = None data_schema: Mixin = None + #: original page url + original_page_url = None + #: request url url = None - def __init__(self, - batch_size: int = 10, - force_update: bool = False, - sleeping_time: int = 10) -> None: - """ + state_schema = RecorderState - :param batch_size:batch size to saving to db - :type batch_size:int - :param force_update: whether force update the data even if it exists,please set it to True if the data need to - be refreshed from the provider - :type force_update:bool - :param sleeping_time:sleeping seconds for recoding loop - :type sleeping_time:int - """ + def __init__(self, force_update: bool = False, sleeping_time: 
int = 10) -> None: + super().__init__() self.logger = logging.getLogger(self.__class__.__name__) assert self.provider is not None assert self.data_schema is not None - assert self.provider in self.data_schema.providers + if self.provider not in self.data_schema.providers: + self.logger.error( + f"provider: {self.provider} is not registered for {self.data_schema}({self.data_schema.providers})" + ) + assert False - self.batch_size = batch_size self.force_update = force_update self.sleeping_time = sleeping_time - # using to do db operations - self.session = get_db_session(provider=self.provider, - data_schema=self.data_schema) + #: using to do db operations + self.session = get_db_session(provider=self.provider, data_schema=self.data_schema) + self.http_session = requests.Session() def run(self): raise NotImplementedError - def sleep(self): - if self.sleeping_time > 0: - self.logger.info(f'sleeping {self.sleeping_time} seconds') + def sleep(self, seconds=None): + if seconds: + sleeping_time = seconds + else: + sleeping_time = self.sleeping_time + + if sleeping_time and sleeping_time > 0: + self.logger.info(f"sleeping {sleeping_time} seconds") time.sleep(self.sleeping_time) -class RecorderForEntities(Recorder): - # overwrite them to fetch the entity list +class EntityEventRecorder(Recorder): + #: overwrite them to fetch the entity list entity_provider: str = None - entity_schema: EntityMixin = None - - def __init__(self, entity_type='stock', exchanges=['sh', 'sz'], entity_ids=None, codes=None, day_data=False, - batch_size=10, force_update=False, sleeping_time=10, entity_filters=None) -> None: + entity_schema: TradableEntity = None + + def __init__( + self, + force_update=False, + sleeping_time=10, + exchanges=None, + entity_id=None, + entity_ids=None, + code=None, + codes=None, + day_data=False, + entity_filters=None, + ignore_failed=True, + return_unfinished=False, + ) -> None: """ + :param code: + :param ignore_failed: :param entity_filters: - :param entity_type: 
:param exchanges: + :param entity_id: for record single entity :param entity_ids: set entity_ids or (entity_type,exchanges,codes) :param codes: :param day_data: one record per day,set to True if you want skip recording it when data of today exist - :param batch_size: :param force_update: :param sleeping_time: """ - super().__init__(batch_size=batch_size, force_update=force_update, sleeping_time=sleeping_time) + super().__init__(force_update=force_update, sleeping_time=sleeping_time) assert self.entity_provider is not None assert self.entity_schema is not None - # setup the entities you want to record - self.entity_type = entity_type + #: setup the entities you want to record self.exchanges = exchanges - self.codes = codes + if codes is None and code is not None: + self.codes = [code] + else: + self.codes = codes self.day_data = day_data - # set entity_ids or (entity_type,exchanges,codes) - self.entity_ids = entity_ids + #: set entity_ids or (entity_type,exchanges,codes) + self.entity_ids = None + if entity_id: + self.entity_ids = [entity_id] + if entity_ids: + self.entity_ids = entity_ids self.entity_filters = entity_filters + self.ignore_failed = ignore_failed + self.return_unfinished = return_unfinished self.entity_session: Session = None self.entities: List = None @@ -121,84 +151,97 @@ def init_entities(self): self.entity_session = get_db_session(provider=self.entity_provider, data_schema=self.entity_schema) if self.day_data: - df = self.data_schema.query_data(start_timestamp=now_time_str(), columns=['entity_id', 'timestamp'], - provider=self.provider) + df = self.data_schema.query_data( + start_timestamp=now_time_str(), columns=["entity_id", "timestamp"], provider=self.provider + ) if pd_is_not_null(df): - entity_ids = df['entity_id'].tolist() - self.logger.info(f'ignore entity_ids:{entity_ids}') + entity_ids = df["entity_id"].tolist() + self.logger.info(f"ignore entity_ids:{entity_ids}") if self.entity_filters: 
self.entity_filters.append(self.entity_schema.entity_id.notin_(entity_ids)) else: self.entity_filters = [self.entity_schema.entity_id.notin_(entity_ids)] - # init the entity list - self.entities = get_entities(session=self.entity_session, - entity_schema=self.entity_schema, - entity_type=self.entity_type, - exchanges=self.exchanges, - entity_ids=self.entity_ids, - codes=self.codes, - return_type='domain', - provider=self.entity_provider, - filters=self.entity_filters) - - -class TimeSeriesDataRecorder(RecorderForEntities): - def __init__(self, - entity_type='stock', - exchanges=['sh', 'sz'], - entity_ids=None, - codes=None, - day_data=False, - batch_size=10, - force_update=False, - sleeping_time=5, - default_size=2000, - real_time=False, - fix_duplicate_way='add', - start_timestamp=None, - end_timestamp=None, - close_hour=0, - close_minute=0, - entity_filters=None) -> None: - - self.default_size = default_size - self.real_time = real_time - - self.close_hour = close_hour - self.close_minute = close_minute - - self.fix_duplicate_way = fix_duplicate_way - + #: init the entity list + self.entities = get_entities( + session=self.entity_session, + entity_schema=self.entity_schema, + exchanges=self.exchanges, + entity_ids=self.entity_ids, + codes=self.codes, + return_type="domain", + provider=self.entity_provider, + filters=self.entity_filters, + ) + + +class TimeSeriesDataRecorder(EntityEventRecorder): + default_size = 2000 + + def __init__( + self, + force_update=False, + sleeping_time=5, + exchanges=None, + entity_id=None, + entity_ids=None, + code=None, + codes=None, + day_data=False, + entity_filters=None, + ignore_failed=True, + real_time=False, + fix_duplicate_way="add", + start_timestamp=None, + end_timestamp=None, + return_unfinished=False, + ) -> None: self.start_timestamp = to_pd_timestamp(start_timestamp) self.end_timestamp = to_pd_timestamp(end_timestamp) + super().__init__( + force_update, + sleeping_time, + exchanges, + entity_id, + entity_ids, + 
code=code, + codes=codes, + day_data=day_data, + entity_filters=entity_filters, + ignore_failed=ignore_failed, + return_unfinished=return_unfinished, + ) - super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - entity_filters) + self.real_time = real_time + self.close_hour, self.close_minute = self.entity_schema.get_close_hour_and_minute() + self.fix_duplicate_way = fix_duplicate_way def get_latest_saved_record(self, entity): - order = eval('self.data_schema.{}.desc()'.format(self.get_evaluated_time_field())) - - records = get_data(entity_id=entity.id, - provider=self.provider, - data_schema=self.data_schema, - order=order, - limit=1, - return_type='domain', - session=self.session) + order = eval("self.data_schema.{}.desc()".format(self.get_evaluated_time_field())) + + records = get_data( + entity_id=entity.id, + provider=self.provider, + data_schema=self.data_schema, + order=order, + limit=1, + return_type="domain", + session=self.session, + ) if records: return records[0] return None def evaluate_start_end_size_timestamps(self, entity): - # not to list date yet + #: not to list date yet if entity.timestamp and (entity.timestamp >= now_pd_timestamp()): + self.logger.info("ignore entity: {} list date: {}", entity.id, entity.timestamp) return entity.timestamp, None, 0, None latest_saved_record = self.get_latest_saved_record(entity=entity) if latest_saved_record: - latest_timestamp = eval('latest_saved_record.{}'.format(self.get_evaluated_time_field())) + latest_timestamp = eval("latest_saved_record.{}".format(self.get_evaluated_time_field())) else: latest_timestamp = entity.timestamp @@ -244,10 +287,10 @@ def get_evaluated_time_field(self): the timestamp field for evaluating time range of recorder,used in get_latest_saved_record """ - return 'timestamp' + return "timestamp" def get_original_time_field(self): - return 'timestamp' + return "timestamp" def generate_domain_id(self, entity, original_data, 
time_fmt=TIME_FORMAT_DAY): """ @@ -275,21 +318,27 @@ def generate_domain(self, entity, original_data): got_new_data = False - # if the domain is directly generated in record method, we just return it + #: if the domain is directly generated in record method, we just return it if isinstance(original_data, self.data_schema): got_new_data = True return got_new_data, original_data the_id = self.generate_domain_id(entity, original_data) - # optional way - # item = self.session.query(self.data_schema).get(the_id) + #: optional way + #: item = self.session.query(self.data_schema).get(the_id) - items = get_data(data_schema=self.data_schema, session=self.session, provider=self.provider, - entity_id=entity.id, filters=[self.data_schema.id == the_id], return_type='domain') + items = get_data( + data_schema=self.data_schema, + session=self.session, + provider=self.provider, + entity_id=entity.id, + filters=[self.data_schema.id == the_id], + return_type="domain", + ) if items and not self.force_update: - self.logger.info('ignore the data {}:{} saved before'.format(self.data_schema, the_id)) + self.logger.info("ignore the data {}:{} saved before".format(self.data_schema, the_id)) return got_new_data, None if not items: @@ -300,17 +349,12 @@ def generate_domain(self, entity, original_data): except Exception as e: self.logger.exception(e) - if 'name' in get_schema_columns(self.data_schema): - domain_item = self.data_schema(id=the_id, - code=entity.code, - name=entity.name, - entity_id=entity.id, - timestamp=timestamp) + if "name" in get_schema_columns(self.data_schema): + domain_item = self.data_schema( + id=the_id, code=entity.code, name=entity.name, entity_id=entity.id, timestamp=timestamp + ) else: - domain_item = self.data_schema(id=the_id, - code=entity.code, - entity_id=entity.id, - timestamp=timestamp) + domain_item = self.data_schema(id=the_id, code=entity.code, entity_id=entity.id, timestamp=timestamp) got_new_data = True else: domain_item = items[0] @@ -339,7 +383,9 @@ 
def persist(self, entity, domain_list): self.logger.info( "persist {} for entity_id:{},time interval:[{},{}]".format( - self.data_schema, entity.id, first_timestamp, last_timestamp)) + self.data_schema, entity.id, first_timestamp, last_timestamp + ) + ) self.session.add_all(domain_list) self.session.commit() @@ -351,6 +397,8 @@ def on_finish(self): if self.entity_session: self.entity_session.close() + if self.http_session: + self.http_session.close() except Exception as e: self.logger.error(e) @@ -365,45 +413,44 @@ def run(self): count = len(unfinished_items) for index, entity_item in enumerate(unfinished_items): try: - self.logger.info(f'run to {index + 1}/{count}') + self.logger.info(f"run to {index + 1}/{count}") start_timestamp, end_timestamp, size, timestamps = self.evaluate_start_end_size_timestamps( - entity_item) + entity_item + ) size = int(size) if timestamps: - self.logger.info('entity_id:{},evaluate_start_end_size_timestamps result:{},{},{},{}-{}'.format( - entity_item.id, - start_timestamp, - end_timestamp, - size, - timestamps[0], - timestamps[-1])) + self.logger.info( + "entity_id:{},evaluate_start_end_size_timestamps result:{},{},{},{}-{}".format( + entity_item.id, start_timestamp, end_timestamp, size, timestamps[0], timestamps[-1] + ) + ) else: - self.logger.info('entity_id:{},evaluate_start_end_size_timestamps result:{},{},{},{}'.format( - entity_item.id, - start_timestamp, - end_timestamp, - size, - timestamps)) - - # no more to record + self.logger.info( + "entity_id:{},evaluate_start_end_size_timestamps result:{},{},{},{}".format( + entity_item.id, start_timestamp, end_timestamp, size, timestamps + ) + ) + + #: no more to record if size == 0: finished_items.append(entity_item) self.logger.info( "finish recording {} for entity_id:{},latest_timestamp:{}".format( - self.data_schema, - entity_item.id, - start_timestamp)) + self.data_schema, entity_item.id, start_timestamp + ) + ) self.on_finish_entity(entity_item) continue - # sleep for a while to 
next entity + #: sleep for a while to next entity if index != 0: self.sleep() - original_list = self.record(entity_item, start=start_timestamp, end=end_timestamp, size=size, - timestamps=timestamps) + original_list = self.record( + entity_item, start=start_timestamp, end=end_timestamp, size=size, timestamps=timestamps + ) all_duplicated = True @@ -415,16 +462,16 @@ def run(self): if got_new_data: all_duplicated = False - # handle the case generate_domain_id generate duplicate id + #: handle the case generate_domain_id generate duplicate id if domain_item: duplicate = [item for item in domain_list if item.id == domain_item.id] if duplicate: - # regenerate the id - if self.fix_duplicate_way == 'add': + #: regenerate the id + if self.fix_duplicate_way == "add": domain_item.id = "{}_{}".format(domain_item.id, uuid.uuid1()) - # ignore + #: ignore else: - self.logger.info(f'ignore original duplicate item:{domain_item.id}') + self.logger.info(f"ignore original duplicate item:{domain_item.id}") continue domain_list.append(domain_item) @@ -432,47 +479,52 @@ def run(self): if domain_list: self.persist(entity_item, domain_list) else: - self.logger.info('just got {} duplicated data in this cycle'.format(len(original_list))) + self.logger.info("just got {} duplicated data in this cycle".format(len(original_list))) - # could not get more data + #: could not get more data entity_finished = False if not original_list or all_duplicated: - # not realtime + #: not realtime if not self.real_time: entity_finished = True - # realtime and to the close time - if self.real_time and \ - (self.close_hour is not None) and \ - (self.close_minute is not None): + #: realtime and to the close time + if self.real_time and (self.close_hour is not None) and (self.close_minute is not None): current_timestamp = pd.Timestamp.now() if current_timestamp.hour >= self.close_hour: if current_timestamp.minute - self.close_minute >= 5: self.logger.info( - '{} now is the close time:{}'.format(entity_item.id, 
current_timestamp)) + "{} now is the close time:{}".format(entity_item.id, current_timestamp) + ) entity_finished = True - # add finished entity to finished_items + #: add finished entity to finished_items if entity_finished: finished_items.append(entity_item) latest_saved_record = self.get_latest_saved_record(entity=entity_item) if latest_saved_record: - start_timestamp = eval('latest_saved_record.{}'.format(self.get_evaluated_time_field())) + start_timestamp = eval("latest_saved_record.{}".format(self.get_evaluated_time_field())) self.logger.info( "finish recording {} for entity_id:{},latest_timestamp:{}".format( - self.data_schema, - entity_item.id, - start_timestamp)) + self.data_schema, entity_item.id, start_timestamp + ) + ) self.on_finish_entity(entity_item) continue except Exception as e: self.logger.exception( - "recording data for entity_id:{},{},error:{}".format(entity_item.id, self.data_schema, e)) + "recording data for entity_id:{},{},error:{}".format(entity_item.id, self.data_schema, e) + ) raising_exception = e + if self.return_unfinished: + self.on_finish() + unfinished_items = set(unfinished_items) - set(finished_items) + return [item.entity_id for item in unfinished_items] + finished_items = unfinished_items break @@ -482,56 +534,74 @@ def run(self): break self.on_finish() + if self.return_unfinished: + return [] if raising_exception: raise raising_exception class FixedCycleDataRecorder(TimeSeriesDataRecorder): - def __init__(self, - entity_type='stock', - exchanges=['sh', 'sz'], - entity_ids=None, - codes=None, - day_data=False, - batch_size=10, - force_update=True, - sleeping_time=10, - default_size=2000, - real_time=False, - fix_duplicate_way='ignore', - start_timestamp=None, - end_timestamp=None, - close_hour=0, - close_minute=0, - # child add - level=IntervalLevel.LEVEL_1DAY, - kdata_use_begin_time=False, - one_day_trading_minutes=24 * 60, - entity_filters=None) -> None: - super().__init__(entity_type, exchanges, entity_ids, codes, day_data, 
batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, - close_hour, close_minute, entity_filters) + def __init__( + self, + force_update=True, + sleeping_time=10, + exchanges=None, + entity_id=None, + entity_ids=None, + code=None, + codes=None, + day_data=False, + entity_filters=None, + ignore_failed=True, + real_time=False, + fix_duplicate_way="ignore", + start_timestamp=None, + end_timestamp=None, + level=IntervalLevel.LEVEL_1DAY, + kdata_use_begin_time=False, + one_day_trading_minutes=24 * 60, + return_unfinished=False, + ) -> None: + super().__init__( + force_update, + sleeping_time, + exchanges, + entity_id, + entity_ids, + code=code, + codes=codes, + day_data=day_data, + entity_filters=entity_filters, + ignore_failed=ignore_failed, + real_time=real_time, + fix_duplicate_way=fix_duplicate_way, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + return_unfinished=return_unfinished, + ) self.level = IntervalLevel(level) self.kdata_use_begin_time = kdata_use_begin_time self.one_day_trading_minutes = one_day_trading_minutes def get_latest_saved_record(self, entity): - order = eval('self.data_schema.{}.desc()'.format(self.get_evaluated_time_field())) - - # 对于k线这种数据,最后一个记录有可能是没完成的,所以取两个 - # 同一周期内只保留最新的一个数据 - records = get_data(entity_id=entity.id, - provider=self.provider, - data_schema=self.data_schema, - order=order, - limit=2, - return_type='domain', - session=self.session, - level=self.level) + order = eval("self.data_schema.{}.desc()".format(self.get_evaluated_time_field())) + + #: 对于k线这种数据,最后一个记录有可能是没完成的,所以取两个 + #: 同一周期内只保留最新的一个数据 + records = get_data( + entity_id=entity.id, + provider=self.provider, + data_schema=self.data_schema, + order=order, + limit=2, + return_type="domain", + session=self.session, + level=self.level, + ) if records: - # delete unfinished kdata + #: delete unfinished kdata if len(records) == 2: if is_in_same_interval(t1=records[0].timestamp, 
t2=records[1].timestamp, level=self.level): self.session.delete(records[1]) @@ -540,25 +610,28 @@ def get_latest_saved_record(self, entity): return None def evaluate_start_end_size_timestamps(self, entity): - # not to list date yet + #: not to list date yet if entity.timestamp and (entity.timestamp >= now_pd_timestamp()): return entity.timestamp, None, 0, None - # get latest record + #: get latest record latest_saved_record = self.get_latest_saved_record(entity=entity) if latest_saved_record: - # the latest saved timestamp + #: the latest saved timestamp latest_saved_timestamp = latest_saved_record.timestamp else: - # the list date + #: the list date latest_saved_timestamp = entity.timestamp if not latest_saved_timestamp: return None, None, self.default_size, None - size = evaluate_size_from_timestamp(start_timestamp=latest_saved_timestamp, level=self.level, - one_day_trading_minutes=self.one_day_trading_minutes) + size = evaluate_size_from_timestamp( + start_timestamp=latest_saved_timestamp, + level=self.level, + one_day_trading_minutes=self.one_day_trading_minutes, + ) if self.start_timestamp: start = max(self.start_timestamp, latest_saved_timestamp) @@ -569,27 +642,39 @@ def evaluate_start_end_size_timestamps(self, entity): class TimestampsDataRecorder(TimeSeriesDataRecorder): - - def __init__(self, - entity_type='stock', - exchanges=['sh', 'sz'], - entity_ids=None, - codes=None, - day_data=False, - batch_size=10, - force_update=False, - sleeping_time=5, - default_size=2000, - real_time=False, - fix_duplicate_way='add', - start_timestamp=None, - end_timestamp=None, - close_hour=0, - close_minute=0, - entity_filters=None) -> None: - super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, - close_hour, close_minute, entity_filters) + def __init__( + self, + force_update=False, + sleeping_time=5, + exchanges=None, + entity_id=None, + 
entity_ids=None, + code=None, + codes=None, + day_data=False, + entity_filters=None, + ignore_failed=True, + real_time=False, + fix_duplicate_way="add", + start_timestamp=None, + end_timestamp=None, + ) -> None: + super().__init__( + force_update, + sleeping_time, + exchanges, + entity_id, + entity_ids, + code=code, + codes=codes, + day_data=day_data, + entity_filters=entity_filters, + ignore_failed=ignore_failed, + real_time=real_time, + fix_duplicate_way=fix_duplicate_way, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + ) self.security_timestamps_map = {} def init_timestamps(self, entity_item) -> List[pd.Timestamp]: @@ -612,13 +697,12 @@ def evaluate_start_end_size_timestamps(self, entity): timestamps.sort() - self.logger.info( - 'entity_id:{},timestamps start:{},end:{}'.format(entity.id, timestamps[0], timestamps[-1])) + self.logger.info("entity_id:{},timestamps start:{},end:{}".format(entity.id, timestamps[0], timestamps[-1])) latest_record = self.get_latest_saved_record(entity=entity) if latest_record: - self.logger.info('latest record timestamp:{}'.format(latest_record.timestamp)) + self.logger.info("latest record timestamp:{}".format(latest_record.timestamp)) timestamps = [t for t in timestamps if t >= latest_record.timestamp] if timestamps: @@ -628,5 +712,12 @@ def evaluate_start_end_size_timestamps(self, entity): return timestamps[0], timestamps[-1], len(timestamps), timestamps -__all__ = ['Recorder', 'RecorderForEntities', 'FixedCycleDataRecorder', 'TimestampsDataRecorder', - 'TimeSeriesDataRecorder'] +# the __all__ is generated +__all__ = [ + "Meta", + "Recorder", + "EntityEventRecorder", + "TimeSeriesDataRecorder", + "FixedCycleDataRecorder", + "TimestampsDataRecorder", +] diff --git a/src/zvt/contract/register.py b/src/zvt/contract/register.py new file mode 100644 index 00000000..7a1e8f77 --- /dev/null +++ b/src/zvt/contract/register.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +import logging +from typing import List + +import 
sqlalchemy +from sqlalchemy import MetaData +from sqlalchemy.ext.declarative import DeclarativeMeta +from sqlalchemy.sql.ddl import CreateTable +from sqlalchemy.sql.expression import text + +from zvt.contract import zvt_context +from zvt.contract.api import get_db_engine, get_db_session_factory +from zvt.contract.schema import TradableEntity, Mixin +from zvt.utils.utils import add_to_map_list + +logger = logging.getLogger(__name__) + + +def register_entity(entity_type: str = None): + """ + function for register entity type + + :param entity_type: + :type entity_type: + :return: + :rtype: + """ + + def register(cls): + # register the entity + if issubclass(cls, TradableEntity): + entity_type_ = entity_type + if not entity_type: + entity_type_ = cls.__name__.lower() + + if entity_type_ not in zvt_context.tradable_entity_types: + zvt_context.tradable_entity_types.append(entity_type_) + zvt_context.tradable_entity_schemas.append(cls) + zvt_context.tradable_schema_map[entity_type_] = cls + return cls + + return register + + +def register_schema( + providers: List[str], + db_name: str, + schema_base: DeclarativeMeta, + entity_type: str = None, +): + """ + function for register schema,please declare them before register + + :param providers: the supported providers for the schema + :type providers: + :param db_name: database name for the schema + :type db_name: + :param schema_base: + :type schema_base: + :param entity_type: the schema related entity_type + :type entity_type: + :return: + :rtype: + """ + schemas = [] + for item in schema_base.registry.mappers: + cls = item.class_ + if type(cls) == DeclarativeMeta: + # register provider to the schema + for provider in providers: + if issubclass(cls, Mixin): + cls.register_provider(provider) + + if zvt_context.dbname_map_schemas.get(db_name): + schemas = zvt_context.dbname_map_schemas[db_name] + zvt_context.schemas.append(cls) + if entity_type: + add_to_map_list(the_map=zvt_context.entity_map_schemas, key=entity_type, 
value=cls) + schemas.append(cls) + + zvt_context.dbname_map_schemas[db_name] = schemas + + for provider in providers: + if provider not in zvt_context.providers: + zvt_context.providers.append(provider) + + if not zvt_context.provider_map_dbnames.get(provider): + zvt_context.provider_map_dbnames[provider] = [] + zvt_context.provider_map_dbnames[provider].append(db_name) + zvt_context.dbname_map_base[db_name] = schema_base + + # create the db & table + engine = get_db_engine(provider, db_name=db_name) + schema_base.metadata.create_all(bind=engine) + session_fac = get_db_session_factory(provider, db_name=db_name) + session_fac.configure(bind=engine) + + for provider in providers: + engine = get_db_engine(provider, db_name=db_name) + + # create index for 'timestamp','entity_id','code','report_period','updated_timestamp + for table_name, table in iter(schema_base.metadata.tables.items()): + # auto add new columns + db_meta = MetaData() + db_meta.reflect(bind=engine) + db_table = db_meta.tables[table_name] + existing_columns = [c.name for c in db_table.columns] + added_columns = [c for c in table.columns if c.name not in existing_columns] + index_list = [] + with engine.connect() as con: + # FIXME: close WAL mode for saving space, most of time no need to write in multiple process + if db_name in ("zvt_info", "stock_news", "stock_tags"): + con.execute(text("PRAGMA journal_mode=WAL;")) + else: + con.execute(text("PRAGMA journal_mode=DELETE;")) + + rs = con.execute(text("PRAGMA INDEX_LIST('{}')".format(table_name))) + for row in rs: + index_list.append(row[1]) + + try: + # Using migration tool like Alembic is too complex + # So we just support add new column, for others just change the db manually + if added_columns: + ddl_c = engine.dialect.ddl_compiler(engine.dialect, CreateTable(table)) + for added_column in added_columns: + stmt = text( + f"ALTER TABLE {table_name} ADD COLUMN {ddl_c.get_column_specification(added_column)}" + ) + logger.info(f"{engine.url} migrations:\n 
{stmt}") + con.execute(stmt) + + logger.debug("engine:{},table:{},index:{}".format(engine, table_name, index_list)) + + for col in [ + "timestamp", + "entity_id", + "code", + "report_period", + "created_timestamp", + "updated_timestamp", + ]: + if col in table.c: + column = eval("table.c.{}".format(col)) + index_name = "{}_{}_index".format(table_name, col) + if index_name not in index_list: + index = sqlalchemy.schema.Index(index_name, column) + index.create(engine) + for cols in [("timestamp", "entity_id"), ("timestamp", "code")]: + if (cols[0] in table.c) and (col[1] in table.c): + column0 = eval("table.c.{}".format(col[0])) + column1 = eval("table.c.{}".format(col[1])) + index_name = "{}_{}_{}_index".format(table_name, col[0], col[1]) + if index_name not in index_list: + index = sqlalchemy.schema.Index(index_name, column0, column1) + index.create(engine) + except Exception as e: + logger.error(e) + + +# the __all__ is generated +__all__ = ["register_entity", "register_schema"] diff --git a/src/zvt/contract/schema.py b/src/zvt/contract/schema.py new file mode 100644 index 00000000..18c3e865 --- /dev/null +++ b/src/zvt/contract/schema.py @@ -0,0 +1,585 @@ +# -*- coding: utf-8 -*- +import inspect +from datetime import timedelta +from typing import List, Union + +import pandas as pd +from sqlalchemy import Column, String, DateTime, Float +from sqlalchemy.orm import Session + +from zvt.contract import IntervalLevel +from zvt.utils.time_utils import date_and_time, is_same_time, now_pd_timestamp + + +class Mixin(object): + """ + Base class of schema. 
+ """ + + #: id + id = Column(String, primary_key=True) + #: entity id + entity_id = Column(String) + + #: the meaning could be different for different case,most time it means 'happen time' + timestamp = Column(DateTime) + + # unix epoch,same meaning with timestamp + # ts = Column(BIGINT) + + @classmethod + def help(cls): + print(inspect.getsource(cls)) + + @classmethod + def important_cols(cls): + return [] + + @classmethod + def time_field(cls): + return "timestamp" + + @classmethod + def register_recorder_cls(cls, provider, recorder_cls): + """ + register the recorder for the schema + + :param provider: + :param recorder_cls: + """ + # don't make provider_map_recorder as class field,it should be created for the sub class as need + if not hasattr(cls, "provider_map_recorder"): + cls.provider_map_recorder = {} + + if provider not in cls.provider_map_recorder: + cls.provider_map_recorder[provider] = recorder_cls + + @classmethod + def register_provider(cls, provider): + """ + register the provider to the schema defined by cls + + :param provider: + """ + # don't make providers as class field,it should be created for the sub class as need + if not hasattr(cls, "providers"): + cls.providers = [] + + if provider not in cls.providers: + cls.providers.append(provider) + + @classmethod + def get_providers(cls) -> List[str]: + """ + providers of the schema defined by cls + + :return: providers + """ + assert hasattr(cls, "providers") + return cls.providers + + @classmethod + def test_data_correctness(cls, provider, data_samples): + for data in data_samples: + item = cls.query_data(provider=provider, ids=[data["id"]], return_type="dict") + print(item) + for k in data: + if k == "timestamp": + assert is_same_time(item[0][k], data[k]) + else: + assert item[0][k] == data[k] + + @classmethod + def get_by_id(cls, id, provider_index: int = 0, provider: str = None): + from .api import get_by_id + + if not provider: + provider = cls.providers[provider_index] + return 
get_by_id(data_schema=cls, id=id, provider=provider) + + @classmethod + def query_data( + cls, + provider_index: int = 0, + ids: List[str] = None, + entity_ids: List[str] = None, + entity_id: str = None, + codes: List[str] = None, + code: str = None, + level: Union[IntervalLevel, str] = None, + provider: str = None, + columns: List = None, + col_label: dict = None, + return_type: str = "df", + start_timestamp: Union[pd.Timestamp, str] = None, + end_timestamp: Union[pd.Timestamp, str] = None, + filters: List = None, + session: Session = None, + order=None, + limit: int = None, + distinct=None, + index: Union[str, list] = None, + drop_index_col=False, + time_field: str = "timestamp", + ): + """ + query data by the arguments + + :param provider_index: + :param data_schema: + :param ids: + :param entity_ids: + :param entity_id: + :param codes: + :param code: + :param level: + :param provider: + :param columns: + :param col_label: dict with key(column), value(label) + :param return_type: df, domain or dict. default is df + :param start_timestamp: + :param end_timestamp: + :param filters: + :param session: + :param order: + :param limit: + :param index: index field name, str for single index, str list for multiple index + :param drop_index_col: whether drop the col if it's in index, default False + :param time_field: + :return: results basing on return_type. 
+ """ + from .api import get_data + + if not provider: + provider = cls.providers[provider_index] + return get_data( + data_schema=cls, + ids=ids, + entity_ids=entity_ids, + entity_id=entity_id, + codes=codes, + code=code, + level=level, + provider=provider, + columns=columns, + col_label=col_label, + return_type=return_type, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + filters=filters, + session=session, + order=order, + limit=limit, + index=index, + distinct=distinct, + drop_index_col=drop_index_col, + time_field=time_field, + ) + + @classmethod + def get_storages( + cls, + provider: str = None, + ): + """ + get the storages info + + :param provider: provider + :return: storages + """ + if not provider: + providers = cls.get_providers() + else: + providers = [provider] + from zvt.contract.api import get_db_engine + + engines = [] + for p in providers: + engines.append(get_db_engine(provider=p, data_schema=cls)) + return engines + + @classmethod + def record_data( + cls, + provider_index: int = 0, + provider: str = None, + force_update=None, + sleeping_time=None, + exchanges=None, + entity_id=None, + entity_ids=None, + code=None, + codes=None, + real_time=None, + fix_duplicate_way=None, + start_timestamp=None, + end_timestamp=None, + one_day_trading_minutes=None, + **kwargs, + ): + """ + record data by the arguments + + :param entity_id: + :param provider_index: + :param provider: + :param force_update: + :param sleeping_time: + :param exchanges: + :param entity_ids: + :param code: + :param codes: + :param real_time: + :param fix_duplicate_way: + :param start_timestamp: + :param end_timestamp: + :param one_day_trading_minutes: + :param kwargs: + :return: + """ + if cls.provider_map_recorder: + print(f"{cls.__name__} registered recorders:{cls.provider_map_recorder}") + + if provider: + recorder_class = cls.provider_map_recorder[provider] + else: + recorder_class = cls.provider_map_recorder[cls.providers[provider_index]] + + # get args for 
specific recorder class + from zvt.contract.recorder import TimeSeriesDataRecorder + + if issubclass(recorder_class, TimeSeriesDataRecorder): + args = [ + item + for item in inspect.getfullargspec(cls.record_data).args + if item not in ("cls", "provider_index", "provider") + ] + else: + args = ["force_update", "sleeping_time"] + + #: just fill the None arg to kw,so we could use the recorder_class default args + kw = {} + for arg in args: + tmp = eval(arg) + if tmp is not None: + kw[arg] = tmp + + #: FixedCycleDataRecorder + from zvt.contract.recorder import FixedCycleDataRecorder + + if issubclass(recorder_class, FixedCycleDataRecorder): + #: contract: + #: 1)use FixedCycleDataRecorder to record the data with IntervalLevel + #: 2)the table of schema with IntervalLevel format is {entity}_{level}_[adjust_type]_{event} + table: str = cls.__tablename__ + try: + items = table.split("_") + if len(items) == 4: + adjust_type = items[2] + kw["adjust_type"] = adjust_type + level = IntervalLevel(items[1]) + except: + #: for other schema not with normal format,but need to calculate size for remaining days + level = IntervalLevel.LEVEL_1DAY + + kw["level"] = level + + #: add other custom args + for k in kwargs: + kw[k] = kwargs[k] + + r = recorder_class(**kw) + return r.run() + else: + r = recorder_class(**kw) + return r.run() + else: + print(f"no recorders for {cls.__name__}") + + +class NormalMixin(Mixin): + #: the record created time in db + created_timestamp = Column(DateTime, default=pd.Timestamp.now()) + #: the record updated time in db, some recorder would check it for whether need to refresh + updated_timestamp = Column(DateTime) + + +class Entity(Mixin): + #: 标的类型 + entity_type = Column(String(length=64)) + #: 所属交易所 + exchange = Column(String(length=32)) + #: 编码 + code = Column(String(length=64)) + #: 名字 + name = Column(String(length=128)) + #: 上市日 + list_date = Column(DateTime) + #: 退市日 + end_date = Column(DateTime) + + +class TradableEntity(Entity): + """ + tradable 
entity + """ + + @classmethod + def get_trading_dates(cls, start_date=None, end_date=None): + """ + overwrite it to get the trading dates of the entity + + :param start_date: + :param end_date: + :return: list of dates + """ + return pd.date_range(start_date, end_date, freq="B") + + @classmethod + def get_trading_intervals(cls, include_bidding_time=False): + """ + overwrite it to get the trading intervals of the entity + + :return: list of time intervals, in format [(start,end)] + """ + if include_bidding_time: + return [("09:20", "11:30"), ("13:00", "15:00")] + else: + return [("09:30", "11:30"), ("13:00", "15:00")] + + @classmethod + def in_real_trading_time(cls, timestamp=None): + if not timestamp: + timestamp = now_pd_timestamp() + else: + timestamp = pd.Timestamp(timestamp) + for open_close in cls.get_trading_intervals(include_bidding_time=True): + open_time = date_and_time(the_date=timestamp.date(), the_time=open_close[0]) + close_time = date_and_time(the_date=timestamp.date(), the_time=open_close[1]) + if open_time <= timestamp <= close_time: + return True + else: + continue + return False + + @classmethod + def in_trading_time(cls, timestamp=None): + if not timestamp: + timestamp = now_pd_timestamp() + else: + timestamp = pd.Timestamp(timestamp) + open_time = date_and_time( + the_date=timestamp.date(), the_time=cls.get_trading_intervals(include_bidding_time=True)[0][0] + ) + close_time = date_and_time( + the_date=timestamp.date(), the_time=cls.get_trading_intervals(include_bidding_time=True)[-1][1] + ) + return open_time <= timestamp <= close_time + + @classmethod + def get_close_hour_and_minute(cls): + hour, minute = cls.get_trading_intervals()[-1][1].split(":") + return int(hour), int(minute) + + @classmethod + def get_interval_timestamps(cls, start_date, end_date, level: IntervalLevel): + """ + generate the timestamps for the level + + :param start_date: + :param end_date: + :param level: + """ + + for current_date in 
cls.get_trading_dates(start_date=start_date, end_date=end_date): + if level == IntervalLevel.LEVEL_1DAY: + yield current_date + elif level == IntervalLevel.LEVEL_1WEEK: + if current_date.weekday() == 4: + yield current_date + else: + start_end_list = cls.get_trading_intervals() + + for start_end in start_end_list: + start = start_end[0] + end = start_end[1] + + current_timestamp = date_and_time(the_date=current_date, the_time=start) + end_timestamp = date_and_time(the_date=current_date, the_time=end) + + while current_timestamp <= end_timestamp: + yield current_timestamp + current_timestamp = current_timestamp + timedelta(minutes=level.to_minute()) + + @classmethod + def is_open_timestamp(cls, timestamp): + timestamp = pd.Timestamp(timestamp) + return is_same_time( + timestamp, + date_and_time(the_date=timestamp.date(), the_time=cls.get_trading_intervals()[0][0]), + ) + + @classmethod + def is_close_timestamp(cls, timestamp): + timestamp = pd.Timestamp(timestamp) + return is_same_time( + timestamp, + date_and_time(the_date=timestamp.date(), the_time=cls.get_trading_intervals()[-1][1]), + ) + + @classmethod + def is_finished_kdata_timestamp(cls, timestamp: pd.Timestamp, level: IntervalLevel): + """ + :param timestamp: the timestamp could be recorded in kdata of the level + :type timestamp: pd.Timestamp + :param level: + :type level: zvt.domain.common.IntervalLevel + :return: + :rtype: bool + """ + timestamp = pd.Timestamp(timestamp) + + for t in cls.get_interval_timestamps(timestamp.date(), timestamp.date(), level=level): + if is_same_time(t, timestamp): + return True + + return False + + @classmethod + def could_short(cls): + """ + whether could be shorted + + :return: + """ + return False + + @classmethod + def get_trading_t(cls): + """ + 0 means t+0 + 1 means t+1 + + :return: + """ + return 1 + + +class ActorEntity(Entity): + pass + + +class NormalEntityMixin(TradableEntity): + #: the record created time in db + created_timestamp = Column(DateTime, 
default=pd.Timestamp.now()) + #: the record updated time in db, some recorder would check it for whether need to refresh + updated_timestamp = Column(DateTime) + + +class Portfolio(TradableEntity): + """ + composition of tradable entities + """ + + @classmethod + def get_stocks( + cls, + code=None, + codes=None, + ids=None, + timestamp=now_pd_timestamp(), + provider=None, + ): + """ + the publishing policy of portfolio positions is different for different types, + overwrite this function for get the holding stocks in specific date + + :param code: portfolio(etf/block/index...) code + :param codes: portfolio(etf/block/index...) codes + :param ids: portfolio(etf/block/index...) ids + :param timestamp: the date of the holding stocks + :param provider: the data provider + :return: + """ + from zvt.contract.api import get_schema_by_name + + schema_str = f"{cls.__name__}Stock" + portfolio_stock = get_schema_by_name(schema_str) + return portfolio_stock.query_data(provider=provider, code=code, codes=codes, timestamp=timestamp, ids=ids) + + +#: 组合(Fund,Etf,Index,Block等)和个股(Stock)的关系 应该继承自该类 +#: 该基础类可以这样理解: +#: entity为组合本身,其包含了stock这种entity,timestamp为持仓日期,从py的"你知道你在干啥"的哲学出发,不加任何约束 +class PortfolioStock(Mixin): + #: portfolio标的类型 + entity_type = Column(String(length=64)) + #: portfolio所属交易所 + exchange = Column(String(length=32)) + #: portfolio编码 + code = Column(String(length=64)) + #: portfolio名字 + name = Column(String(length=128)) + + stock_id = Column(String) + stock_code = Column(String(length=64)) + stock_name = Column(String(length=128)) + + +#: 支持时间变化,报告期标的调整 +class PortfolioStockHistory(PortfolioStock): + #: 报告期,season1,half_year,season3,year + report_period = Column(String(length=32)) + #: 3-31,6-30,9-30,12-31 + report_date = Column(DateTime) + + #: 占净值比例 + proportion = Column(Float) + #: 持有股票的数量 + shares = Column(Float) + #: 持有股票的市值 + market_cap = Column(Float) + + +#: 交易标的和参与者的关系应该继承自该类, meet,遇见,恰如其分的诠释参与者和交易标的的关系 +#: 
市场就是参与者与交易标的的关系,类的命名规范为{Entity}{relation}{Entity},entity_id代表"所"为的entity,"受"者entity以具体类别的id命名 +#: 比如StockTopTenHolder:TradableMeetActor中entity_id和actor_id,分别代表股票和股东 +class TradableMeetActor(Mixin): + #: tradable code + code = Column(String(length=64)) + #: tradable name + name = Column(String(length=128)) + + actor_id = Column(String) + actor_type = Column(String) + actor_code = Column(String(length=64)) + actor_name = Column(String(length=128)) + + +#: 也可以"所"为参与者,"受"为标的 +class ActorMeetTradable(Mixin): + #: actor code + code = Column(String(length=64)) + #: actor name + name = Column(String(length=128)) + + tradable_id = Column(String) + tradable_type = Column(String) + tradable_code = Column(String(length=64)) + tradable_name = Column(String(length=128)) + + +# the __all__ is generated +__all__ = [ + "Mixin", + "NormalMixin", + "Entity", + "TradableEntity", + "ActorEntity", + "NormalEntityMixin", + "Portfolio", + "PortfolioStock", + "PortfolioStockHistory", + "TradableMeetActor", + "ActorMeetTradable", +] diff --git a/src/zvt/contract/utils.py b/src/zvt/contract/utils.py new file mode 100644 index 00000000..0f73375a --- /dev/null +++ b/src/zvt/contract/utils.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +import math + +import pandas as pd + +from zvt.contract import IntervalLevel +from zvt.utils.time_utils import to_pd_timestamp + + +def is_in_same_interval(t1: pd.Timestamp, t2: pd.Timestamp, level: IntervalLevel): + t1 = to_pd_timestamp(t1) + t2 = to_pd_timestamp(t2) + if level == IntervalLevel.LEVEL_1WEEK: + return t1.week == t2.week + if level == IntervalLevel.LEVEL_1MON: + return t1.month == t2.month + + return level.floor_timestamp(t1) == level.floor_timestamp(t2) + + +def evaluate_size_from_timestamp( + start_timestamp, level: IntervalLevel, one_day_trading_minutes, end_timestamp: pd.Timestamp = None +): + """ + given from timestamp,level,one_day_trading_minutes,this func evaluate size of kdata to current. 
+ it maybe a little bigger than the real size for fetching all the kdata. + + :param start_timestamp: + :type start_timestamp: pd.Timestamp + :param level: + :type level: IntervalLevel + :param one_day_trading_minutes: + :type one_day_trading_minutes: int + """ + if not end_timestamp: + end_timestamp = pd.Timestamp.now() + else: + end_timestamp = to_pd_timestamp(end_timestamp) + + time_delta = end_timestamp - to_pd_timestamp(start_timestamp) + + one_day_trading_seconds = one_day_trading_minutes * 60 + + if level == IntervalLevel.LEVEL_1DAY: + return time_delta.days + 1 + + if level == IntervalLevel.LEVEL_1WEEK: + return int(math.ceil(time_delta.days / 7)) + 1 + + if level == IntervalLevel.LEVEL_1MON: + return int(math.ceil(time_delta.days / 30)) + 1 + + if time_delta.days > 0: + seconds = (time_delta.days + 1) * one_day_trading_seconds + return int(math.ceil(seconds / level.to_second())) + 1 + else: + seconds = time_delta.total_seconds() + return min(int(math.ceil(seconds / level.to_second())) + 1, one_day_trading_seconds / level.to_second() + 1) + + +def next_timestamp_on_level(current_timestamp: pd.Timestamp, level: IntervalLevel) -> pd.Timestamp: + current_timestamp = to_pd_timestamp(current_timestamp) + return current_timestamp + pd.Timedelta(seconds=level.to_second()) + + +def is_finished_kdata_timestamp(timestamp, level: IntervalLevel): + timestamp = to_pd_timestamp(timestamp) + if level.floor_timestamp(timestamp) == timestamp: + return True + return False diff --git a/src/zvt/contract/zvt_info.py b/src/zvt/contract/zvt_info.py new file mode 100644 index 00000000..7eed002e --- /dev/null +++ b/src/zvt/contract/zvt_info.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import Column, String, Text +from sqlalchemy.orm import declarative_base + +from zvt.contract.register import register_schema +from zvt.contract.schema import Mixin + +ZvtInfoBase = declarative_base() + + +class StateMixin(Mixin): + #: the unique name of the service, e.g. 
recorder,factor,tag + state_name = Column(String(length=128)) + + #: json string + state = Column(Text()) + + +class RecorderState(ZvtInfoBase, StateMixin): + """ + Schema for storing recorder state + """ + + __tablename__ = "recorder_state" + + +class TaggerState(ZvtInfoBase, StateMixin): + """ + Schema for storing tagger state + """ + + __tablename__ = "tagger_state" + + +class FactorState(ZvtInfoBase, StateMixin): + """ + Schema for storing factor state + """ + + __tablename__ = "factor_state" + + +register_schema(providers=["zvt"], db_name="zvt_info", schema_base=ZvtInfoBase) + + +# the __all__ is generated +__all__ = ["StateMixin", "RecorderState", "TaggerState", "FactorState"] diff --git a/src/zvt/domain/__init__.py b/src/zvt/domain/__init__.py new file mode 100644 index 00000000..ee78451b --- /dev/null +++ b/src/zvt/domain/__init__.py @@ -0,0 +1,192 @@ +# -*- coding: utf-8 -*- +import enum + + +class BlockCategory(enum.Enum): + #: 行业版块 + industry = "industry" + #: 概念版块 + concept = "concept" + #: 区域版块 + area = "area" + + +class IndexCategory(enum.Enum): + #: 中国指数提供商: + #: 中证指数公司 http://www.csindex.com.cn/zh-CN + #: 上证指数(上交所标的) 中证指数(沪深) + + #: 国证指数公司 http://www.cnindex.com.cn/index.html + #: 深证指数(深交所标的) 国证指数(沪深) + + #: 规模指数 + #: 常见的上证指数,深证指数等 + scope = "scope" + #: 行业指数 + industry = "industry" + #: 风格指数 + style = "style" + #: 主题指数 + # + #: 策略指数 + # + #: 综合指数 + # + #: 债券指数 + # + #: 基金指数 + fund = "fund" + #: 定制指数 + # + #: 人民币指数 + # + #: 跨境指数 + # + #: 其他指数 + + +class ReportPeriod(enum.Enum): + # 有些基金的2,4季报只有10大持仓,半年报和年报有详细持仓,需要区别对待 + season1 = "season1" + season2 = "season2" + season3 = "season3" + season4 = "season4" + half_year = "half_year" + year = "year" + + +# 用于区分不同的财务指标 +class CompanyType(enum.Enum): + qiye = "qiye" + baoxian = "baoxian" + yinhang = "yinhang" + quanshang = "quanshang" + + +CHINA_FUTURE_CODE_MAP_NAME = { + "I": "铁矿石", + "RB": "螺纹钢", + "HC": "热轧卷板", + "SS": "不锈钢", + "SF": "硅铁", + "SM": "锰硅", + "WR": "线材", + "CU": "沪铜", + "AL": "沪铝", + "ZN": 
"沪锌", + "PB": "沪铅", + "NI": "沪镍", + "SN": "沪锡", + "BC": "国际铜", + "AU": "沪金", + "AG": "沪银", + "A": "豆一", + "B": "豆二", + "Y": "豆油", + "M": "豆粕", + "RS": "菜籽", + "OI": "菜油", + "RM": "菜粕", + "P": "棕榈油", + "C": "玉米", + "CS": "玉米淀粉", + "JD": "鸡蛋", + "CF": "一号棉花", + "CY": "棉纱", + "SR": "白糖", + "AP": "苹果", + "CJ": "红枣", + "PK": "花生", + "PM": "普麦", + "WH": "强麦", + "RR": "粳米", + "JR": "粳稻", + "RI": "早籼稻", + "LR": "晚籼稻", + "LH": "生猪", + "SC": "原油", + "FU": "燃油", + "PG": "LPG", + "LU": "低硫燃油", + "BU": "石油沥青", + "MA": "甲醇", + "EG": "乙二醇", + "L": "聚乙烯", + "TA": "PTA", + "V": "聚氯乙烯", + "PP": "聚丙烯", + "EB": "苯乙烯", + "SA": "纯碱", + "FG": "玻璃", + "UR": "尿素", + "RU": "橡胶", + "NR": "20号胶", + "SP": "纸浆", + "FB": "纤维板", + "BB": "胶合板", + "PF": "短纤", + "JM": "焦煤", + "J": "焦炭", + "ZC": "动力煤", + "IC": "中证500指数", + "IF": "沪深300指数", + "IH": "上证50指数", + "T": "10年期国债期货", + "TF": "5年期国债期货", + "TS": "2年期国债期货", +} + + +def get_future_name(code): + simple_code = code[:-4] + return f"{CHINA_FUTURE_CODE_MAP_NAME[simple_code]}{code[-4:]}" + + +# the __all__ is generated +__all__ = ["BlockCategory", "IndexCategory", "ReportPeriod", "CompanyType", "get_future_name"] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule misc +from .misc import * +from .misc import __all__ as _misc_all + +__all__ += _misc_all + +# import all from submodule quotes +from .quotes import * +from .quotes import __all__ as _quotes_all + +__all__ += _quotes_all + +# import all from submodule meta +from .meta import * +from .meta import __all__ as _meta_all + +__all__ += _meta_all + +# import all from submodule fundamental +from .fundamental import * +from .fundamental import __all__ as _fundamental_all + +__all__ += _fundamental_all + +# import all from submodule macro +from .macro import * +from .macro import __all__ as _macro_all + +__all__ += _macro_all + +# import all from submodule actor +from .actor import * +from 
.actor import __all__ as _actor_all + +__all__ += _actor_all + +# import all from submodule emotion +from .emotion import * +from .emotion import __all__ as _emotion_all + +__all__ += _emotion_all diff --git a/src/zvt/domain/actor/__init__.py b/src/zvt/domain/actor/__init__.py new file mode 100644 index 00000000..c9d337ae --- /dev/null +++ b/src/zvt/domain/actor/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule stock_actor +from .stock_actor import * +from .stock_actor import __all__ as _stock_actor_all + +__all__ += _stock_actor_all + +# import all from submodule actor_meta +from .actor_meta import * +from .actor_meta import __all__ as _actor_meta_all + +__all__ += _actor_meta_all diff --git a/src/zvt/domain/actor/actor_meta.py b/src/zvt/domain/actor/actor_meta.py new file mode 100644 index 00000000..d89ca653 --- /dev/null +++ b/src/zvt/domain/actor/actor_meta.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- + +from sqlalchemy.orm import declarative_base + +from zvt.contract.register import register_schema +from zvt.contract.schema import ActorEntity + +ActorMetaBase = declarative_base() + + +#: 参与者 +class ActorMeta(ActorMetaBase, ActorEntity): + __tablename__ = "actor_meta" + + +register_schema(providers=["em"], db_name="actor_meta", schema_base=ActorMetaBase) + + +# the __all__ is generated +__all__ = ["ActorMeta"] diff --git a/src/zvt/domain/actor/stock_actor.py b/src/zvt/domain/actor/stock_actor.py new file mode 100644 index 00000000..d0a93def --- /dev/null +++ b/src/zvt/domain/actor/stock_actor.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import Column, String, DateTime, Float, Boolean, Integer +from sqlalchemy.orm import declarative_base + +from zvt.contract.register import register_schema +from zvt.contract.schema import 
TradableMeetActor + +StockActorBase = declarative_base() + + +class StockTopTenFreeHolder(StockActorBase, TradableMeetActor): + __tablename__ = "stock_top_ten_free_holder" + + report_period = Column(String(length=32)) + report_date = Column(DateTime) + + #: 持股数 + holding_numbers = Column(Float) + #: 持股比例 + holding_ratio = Column(Float) + #: 持股市值 + holding_values = Column(Float) + + +class StockTopTenHolder(StockActorBase, TradableMeetActor): + __tablename__ = "stock_top_ten_holder" + + report_period = Column(String(length=32)) + report_date = Column(DateTime) + + #: 持股数 + holding_numbers = Column(Float) + #: 持股比例 + holding_ratio = Column(Float) + #: 持股市值 + holding_values = Column(Float) + + +class StockInstitutionalInvestorHolder(StockActorBase, TradableMeetActor): + __tablename__ = "stock_institutional_investor_holder" + + report_period = Column(String(length=32)) + report_date = Column(DateTime) + + #: 持股数 + holding_numbers = Column(Float) + #: 持股比例 + holding_ratio = Column(Float) + #: 持股市值 + holding_values = Column(Float) + + +class StockActorSummary(StockActorBase, TradableMeetActor): + __tablename__ = "stock_actor_summary" + #: tradable code + code = Column(String(length=64)) + #: tradable name + name = Column(String(length=128)) + + report_period = Column(String(length=32)) + report_date = Column(DateTime) + + #: 变动比例 + change_ratio = Column(Float) + #: 是否完成 + is_complete = Column(Boolean) + #: 持股市值 + actor_type = Column(String) + actor_count = Column(Integer) + + #: 持股数 + holding_numbers = Column(Float) + #: 持股比例 + holding_ratio = Column(Float) + #: 持股市值 + holding_values = Column(Float) + + +register_schema(providers=["em"], db_name="stock_actor", schema_base=StockActorBase, entity_type="stock") + + +# the __all__ is generated +__all__ = ["StockTopTenFreeHolder", "StockTopTenHolder", "StockInstitutionalInvestorHolder", "StockActorSummary"] diff --git a/src/zvt/domain/emotion/__init__.py b/src/zvt/domain/emotion/__init__.py new file mode 100644 index 
00000000..f0db3f9d --- /dev/null +++ b/src/zvt/domain/emotion/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule emotion +from .emotion import * +from .emotion import __all__ as _emotion_all + +__all__ += _emotion_all diff --git a/src/zvt/domain/emotion/emotion.py b/src/zvt/domain/emotion/emotion.py new file mode 100644 index 00000000..18688beb --- /dev/null +++ b/src/zvt/domain/emotion/emotion.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import Column, String, Integer, DateTime, Boolean, Float +from sqlalchemy.orm import declarative_base + +from zvt.contract import Mixin +from zvt.contract.register import register_schema + +EmotionBase = declarative_base() + + +class LimitUpInfo(EmotionBase, Mixin): + __tablename__ = "limit_up_info" + + code = Column(String(length=32)) + name = Column(String(length=32)) + #: 是否新股 + is_new = Column(Boolean) + #: 是否回封,是就是打开过,否相反 + is_again_limit = Column(Boolean) + #: 涨停打开次数,0代表封住就没开板 + open_count = Column(Integer) + #: 首次封板时间 + first_limit_up_time = Column(DateTime) + #: 最后封板时间 + last_limit_up_time = Column(DateTime) + #: 涨停类型:换手板,一字板 + limit_up_type = Column(String) + #: 封单金额 + order_amount = Column(String) + #: 最近一年封板成功率 + success_rate = Column(Float) + #: 流通市值 + currency_value = Column(Float) + #: 涨幅 + change_pct = Column(Float) + #: 换手率 + turnover_rate = Column(Float) + #: 涨停原因 + reason = Column(String) + #: 几天几板 + high_days = Column(String) + #: 最近几板,不一定是连板 + high_days_count = Column(Integer) + + +class LimitDownInfo(EmotionBase, Mixin): + __tablename__ = "limit_down_info" + + code = Column(String(length=32)) + name = Column(String(length=32)) + #: 是否新股 + is_new = Column(Boolean) + #: 是否回封,是就是打开过,否相反 + is_again_limit = Column(Boolean) + #: 流通市值 + currency_value = Column(Float) + #: 涨幅 + change_pct = 
Column(Float) + #: 换手率 + turnover_rate = Column(Float) + + +class Emotion(EmotionBase, Mixin): + __tablename__ = "emotion" + #: 涨停数量 + limit_up_count = Column(Integer) + #: 炸板数 + limit_up_open_count = Column(Integer) + #: 涨停封板成功率 + limit_up_success_rate = Column(Float) + + #: 连板高度 + max_height = Column(Integer) + #: 连板数x个数 相加 + continuous_power = Column(Integer) + + #: 跌停数量 + limit_down_count = Column(Integer) + #: 跌停打开 + limit_down_open_count = Column(Integer) + #: 跌停封板成功率 + limit_down_success_rate = Column(Float) + + +register_schema(providers=["jqka"], db_name="emotion", schema_base=EmotionBase) + + +# the __all__ is generated +__all__ = ["LimitUpInfo", "LimitDownInfo", "Emotion"] diff --git a/zvt/domain/fundamental/__init__.py b/src/zvt/domain/fundamental/__init__.py similarity index 95% rename from zvt/domain/fundamental/__init__.py rename to src/zvt/domain/fundamental/__init__.py index e1e62507..70afbd39 100644 --- a/zvt/domain/fundamental/__init__.py +++ b/src/zvt/domain/fundamental/__init__.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- + + # the __all__ is generated __all__ = [] @@ -9,19 +11,23 @@ # import all from submodule dividend_financing from .dividend_financing import * from .dividend_financing import __all__ as _dividend_financing_all + __all__ += _dividend_financing_all # import all from submodule finance from .finance import * from .finance import __all__ as _finance_all + __all__ += _finance_all # import all from submodule trading from .trading import * from .trading import __all__ as _trading_all + __all__ += _trading_all # import all from submodule valuation from .valuation import * from .valuation import __all__ as _valuation_all -__all__ += _valuation_all \ No newline at end of file + +__all__ += _valuation_all diff --git a/zvt/domain/fundamental/dividend_financing.py b/src/zvt/domain/fundamental/dividend_financing.py similarity index 76% rename from zvt/domain/fundamental/dividend_financing.py rename to 
src/zvt/domain/fundamental/dividend_financing.py index 62842465..e54c1fdc 100644 --- a/zvt/domain/fundamental/dividend_financing.py +++ b/src/zvt/domain/fundamental/dividend_financing.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- from sqlalchemy import Column, String, DateTime, Float -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract import Mixin from zvt.contract.register import register_schema @@ -9,22 +9,22 @@ class DividendFinancing(DividendFinancingBase, Mixin): - __tablename__ = 'dividend_financing' + __tablename__ = "dividend_financing" provider = Column(String(length=32)) code = Column(String(length=32)) - # 分红总额 + #: 分红总额 dividend_money = Column(Float) - # 新股 + #: 新股 ipo_issues = Column(Float) ipo_raising_fund = Column(Float) - # 增发 + #: 增发 spo_issues = Column(Float) spo_raising_fund = Column(Float) - # 配股 + #: 配股 rights_issues = Column(Float) rights_raising_fund = Column(Float) @@ -35,14 +35,14 @@ class DividendDetail(DividendFinancingBase, Mixin): provider = Column(String(length=32)) code = Column(String(length=32)) - # 公告日 + #: 公告日 announce_date = Column(DateTime) - # 股权登记日 + #: 股权登记日 record_date = Column(DateTime) - # 除权除息日 + #: 除权除息日 dividend_date = Column(DateTime) - # 方案 + #: 方案 dividend = Column(String(length=128)) @@ -63,13 +63,16 @@ class RightsIssueDetail(DividendFinancingBase, Mixin): provider = Column(String(length=32)) code = Column(String(length=32)) - # 配股 + #: 配股 rights_issues = Column(Float) rights_issue_price = Column(Float) rights_raising_fund = Column(Float) -register_schema(providers=['eastmoney'], db_name='dividend_financing', schema_base=DividendFinancingBase, entity_type='stock') +register_schema( + providers=["eastmoney"], db_name="dividend_financing", schema_base=DividendFinancingBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['DividendFinancing', 'DividendDetail', 'SpoDetail', 'RightsIssueDetail'] \ No newline at end of file +__all__ = 
["DividendFinancing", "DividendDetail", "SpoDetail", "RightsIssueDetail"] diff --git a/zvt/domain/fundamental/finance.py b/src/zvt/domain/fundamental/finance.py similarity index 50% rename from zvt/domain/fundamental/finance.py rename to src/zvt/domain/fundamental/finance.py index ee4cc60f..58d51156 100644 --- a/zvt/domain/fundamental/finance.py +++ b/src/zvt/domain/fundamental/finance.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- from sqlalchemy import Column, String, DateTime, Float, Integer -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract import Mixin from zvt.contract.register import register_schema @@ -9,13 +9,19 @@ class BalanceSheet(FinanceBase, Mixin): - @classmethod def important_cols(cls): - return ['total_assets', 'total_liabilities', 'equity', 'cash_and_cash_equivalents', 'accounts_receivable', - 'inventories', 'goodwill'] - - __tablename__ = 'balance_sheet' + return [ + "total_assets", + "total_liabilities", + "equity", + "cash_and_cash_equivalents", + "accounts_receivable", + "inventories", + "goodwill", + ] + + __tablename__ = "balance_sheet" provider = Column(String(length=32)) code = Column(String(length=32)) @@ -23,442 +29,448 @@ def important_cols(cls): report_period = Column(String(length=32)) report_date = Column(DateTime) - # 流动资产 + #: 流动资产 # - # 货币资金 + #: 货币资金 cash_and_cash_equivalents = Column(Float) - # 应收票据 + #: 应收票据 note_receivable = Column(Float) - # 应收账款 + #: 应收账款 accounts_receivable = Column(Float) - # 预付款项 + #: 预付款项 advances_to_suppliers = Column(Float) - # 其他应收款 + #: 其他应收款 other_receivables = Column(Float) - # 存货 + #: 存货 inventories = Column(Float) - # 一年内到期的非流动资产 + #: 一年内到期的非流动资产 current_portion_of_non_current_assets = Column(Float) - # 其他流动资产 + #: 其他流动资产 other_current_assets = Column(Float) - # 流动资产合计 + #: 流动资产合计 total_current_assets = Column(Float) - # 非流动资产 + #: 非流动资产 # - # 可供出售金融资产 + #: 可供出售金融资产 fi_assets_saleable = Column(Float) - # 长期应收款 + #: 长期应收款 
long_term_receivables = Column(Float) - # 长期股权投资 + #: 长期股权投资 long_term_equity_investment = Column(Float) - # 投资性房地产 + #: 投资性房地产 real_estate_investment = Column(Float) - # 固定资产 + #: 固定资产 fixed_assets = Column(Float) - # 在建工程 + #: 在建工程 construction_in_process = Column(Float) - # 无形资产 + #: 无形资产 intangible_assets = Column(Float) - # 商誉 + #: 商誉 goodwill = Column(Float) - # 长期待摊费用 + #: 长期待摊费用 long_term_prepaid_expenses = Column(Float) - # 递延所得税资产 + #: 递延所得税资产 deferred_tax_assets = Column(Float) - # 其他非流动资产 + #: 其他非流动资产 other_non_current_assets = Column(Float) - # 非流动资产合计 + #: 非流动资产合计 total_non_current_assets = Column(Float) - # 资产总计 + #: 资产总计 total_assets = Column(Float) - # 流动负债 + #: 流动负债 # - # 短期借款 + #: 短期借款 short_term_borrowing = Column(Float) - # 吸收存款及同业存放 + #: 吸收存款及同业存放 accept_money_deposits = Column(Float) - # 应付账款 + #: 应付账款 accounts_payable = Column(Float) - # 预收款项 + #: 预收款项 advances_from_customers = Column(Float) - # 应付职工薪酬 + #: 应付职工薪酬 employee_benefits_payable = Column(Float) - # 应交税费 + #: 应交税费 taxes_payable = Column(Float) - # 应付利息 + #: 应付利息 interest_payable = Column(Float) - # 其他应付款 + #: 其他应付款 other_payable = Column(Float) - # 一年内到期的非流动负债 + #: 一年内到期的非流动负债 current_portion_of_non_current_liabilities = Column(Float) - # 其他流动负债 + #: 其他流动负债 other_current_liabilities = Column(Float) - # 流动负债合计 + #: 流动负债合计 total_current_liabilities = Column(Float) - # 非流动负债 + #: 非流动负债 # - # 长期借款 + #: 长期借款 long_term_borrowing = Column(Float) - # 长期应付款 + #: 长期应付款 long_term_payable = Column(Float) - # 递延收益 + #: 递延收益 deferred_revenue = Column(Float) - # 递延所得税负债 + #: 递延所得税负债 deferred_tax_liabilities = Column(Float) - # 其他非流动负债 + #: 其他非流动负债 other_non_current_liabilities = Column(Float) - # 非流动负债合计 + #: 非流动负债合计 total_non_current_liabilities = Column(Float) - # 负债合计 + #: 负债合计 total_liabilities = Column(Float) - # 所有者权益(或股东权益) + #: 所有者权益(或股东权益) # - # 实收资本(或股本) + #: 实收资本(或股本) capital = Column(Float) - # 资本公积 + #: 资本公积 capital_reserve = Column(Float) - # 专项储备 + #: 专项储备 special_reserve = 
Column(Float) - # 盈余公积 + #: 盈余公积 surplus_reserve = Column(Float) - # 未分配利润 + #: 未分配利润 undistributed_profits = Column(Float) - # 归属于母公司股东权益合计 + #: 归属于母公司股东权益合计 equity = Column(Float) - # 少数股东权益 + #: 少数股东权益 equity_as_minority_interest = Column(Float) - # 股东权益合计 + #: 股东权益合计 total_equity = Column(Float) - # 负债和股东权益合计 + #: 负债和股东权益合计 total_liabilities_and_equity = Column(Float) - # 银行相关 - # 资产 - # 现金及存放中央银行款项 + #: 银行相关 + #: 资产 + #: 现金及存放中央银行款项 fi_cash_and_deposit_in_central_bank = Column(Float) - # 存放同业款项 + #: 存放同业款项 fi_deposit_in_other_fi = Column(Float) - # 贵金属 + #: 贵金属 fi_expensive_metals = Column(Float) - # 拆出资金 + #: 拆出资金 fi_lending_to_other_fi = Column(Float) - # 以公允价值计量且其变动计入当期损益的金融资产 + #: 以公允价值计量且其变动计入当期损益的金融资产 fi_financial_assets_effect_current_income = Column(Float) - # 衍生金融资产 + #: 衍生金融资产 fi_financial_derivative_asset = Column(Float) - # 买入返售金融资产 + #: 买入返售金融资产 fi_buying_sell_back_fi__asset = Column(Float) - # 应收账款 + #: 应收账款 # - # 应收利息 + #: 应收利息 fi_interest_receivable = Column(Float) - # 发放贷款及垫款 + #: 发放贷款及垫款 fi_disbursing_loans_and_advances = Column(Float) - # 可供出售金融资产 + #: 可供出售金融资产 # - # 持有至到期投资 + #: 持有至到期投资 fi_held_to_maturity_investment = Column(Float) - # 应收款项类投资 + #: 应收款项类投资 fi_account_receivable_investment = Column(Float) - # 投资性房地产 + #: 投资性房地产 # - # 固定资产 + #: 固定资产 # - # 无形资产 + #: 无形资产 # - # 商誉 + #: 商誉 # - # 递延所得税资产 + #: 递延所得税资产 # - # 其他资产 + #: 其他资产 fi_other_asset = Column(Float) - # 资产总计 + #: 资产总计 # - # 负债 + #: 负债 # - # 向中央银行借款 + #: 向中央银行借款 fi_borrowings_from_central_bank = Column(Float) - # 同业和其他金融机构存放款项 + #: 同业和其他金融机构存放款项 fi_deposit_from_other_fi = Column(Float) - # 拆入资金 + #: 拆入资金 fi_borrowings_from_fi = Column(Float) - # 以公允价值计量且其变动计入当期损益的金融负债 + #: 以公允价值计量且其变动计入当期损益的金融负债 fi_financial_liability_effect_current_income = Column(Float) - # 衍生金融负债 + #: 衍生金融负债 fi_financial_derivative_liability = Column(Float) - # 卖出回购金融资产款 + #: 卖出回购金融资产款 fi_sell_buy_back_fi_asset = Column(Float) - # 吸收存款 + #: 吸收存款 fi_savings_absorption = Column(Float) - # 存款证及应付票据 + #: 
存款证及应付票据 fi_notes_payable = Column(Float) - # 应付职工薪酬 + #: 应付职工薪酬 # - # 应交税费 + #: 应交税费 # - # 应付利息 + #: 应付利息 # - # 预计负债 + #: 预计负债 fi_estimated_liabilities = Column(Float) - # 应付债券 + #: 应付债券 fi_bond_payable = Column(Float) - # 其他负债 + #: 其他负债 fi_other_liability = Column(Float) - # 负债合计 + #: 负债合计 # - # 所有者权益(或股东权益) - # 股本 + #: 所有者权益(或股东权益) + #: 股本 fi_capital = Column(Float) - # 其他权益工具 + #: 其他权益工具 fi_other_equity_instruments = Column(Float) - # 其中:优先股 + #: 其中:优先股 fi_preferred_stock = Column(Float) - # 资本公积 + #: 资本公积 # - # 盈余公积 + #: 盈余公积 # - # 一般风险准备 + #: 一般风险准备 fi_generic_risk_reserve = Column(Float) - # 未分配利润 + #: 未分配利润 # - # 归属于母公司股东权益合计 + #: 归属于母公司股东权益合计 # - # 股东权益合计 + #: 股东权益合计 # - # 负债及股东权益总计 + #: 负债及股东权益总计 - # 券商相关 - # 资产 + #: 券商相关 + #: 资产 # - # 货币资金 + #: 货币资金 # - # 其中: 客户资金存款 + #: 其中: 客户资金存款 fi_client_fund = Column(Float) - # 结算备付金 + #: 结算备付金 fi_deposit_reservation_for_balance = Column(Float) - # 其中: 客户备付金 + #: 其中: 客户备付金 fi_client_deposit_reservation_for_balance = Column(Float) - # 融出资金 + #: 融出资金 fi_margin_out_fund = Column(Float) - # 以公允价值计量且其变动计入当期损益的金融资产 + #: 以公允价值计量且其变动计入当期损益的金融资产 # - # 衍生金融资产 + #: 衍生金融资产 # - # 买入返售金融资产 + #: 买入返售金融资产 # - # 应收利息 + #: 应收利息 # - # 应收款项 + #: 应收款项 fi_receivables = Column(Float) - # 存出保证金 + #: 存出保证金 fi_deposit_for_recognizance = Column(Float) - # 可供出售金融资产 + #: 可供出售金融资产 # - # 持有至到期投资 + #: 持有至到期投资 # - # 长期股权投资 + #: 长期股权投资 # - # 固定资产 + #: 固定资产 # - # 在建工程 + #: 在建工程 # - # 无形资产 + #: 无形资产 # - # 商誉 + #: 商誉 # - # 递延所得税资产 + #: 递延所得税资产 # - # 其他资产 + #: 其他资产 # - # 资产总计 + #: 资产总计 # - # 负债 + #: 负债 # - # 短期借款 + #: 短期借款 # - # 拆入资金 + #: 拆入资金 # - # 以公允价值计量且其变动计入当期损益的金融负债 + #: 以公允价值计量且其变动计入当期损益的金融负债 # - # 衍生金融负债 + #: 衍生金融负债 # - # 卖出回购金融资产款 + #: 卖出回购金融资产款 # - # 代理买卖证券款 + #: 代理买卖证券款 fi_receiving_as_agent = Column(Float) - # 应付账款 + #: 应付账款 # - # 应付职工薪酬 + #: 应付职工薪酬 # - # 应交税费 + #: 应交税费 # - # 应付利息 + #: 应付利息 # - # 应付短期融资款 + #: 应付短期融资款 fi_short_financing_payable = Column(Float) - # 预计负债 + #: 预计负债 # - # 应付债券 + #: 应付债券 # - # 递延所得税负债 + #: 递延所得税负债 # - # 其他负债 + #: 
其他负债 # - # 负债合计 + #: 负债合计 # - # 所有者权益(或股东权益) + #: 所有者权益(或股东权益) # - # 股本 + #: 股本 # - # 资本公积 + #: 资本公积 # - # 其他权益工具 + #: 其他权益工具 # - # 盈余公积 + #: 盈余公积 # - # 一般风险准备 + #: 一般风险准备 # - # 交易风险准备 + #: 交易风险准备 fi_trade_risk_reserve = Column(Float) - # 未分配利润 + #: 未分配利润 # - # 归属于母公司股东权益合计 + #: 归属于母公司股东权益合计 # - # 少数股东权益 + #: 少数股东权益 # - # 股东权益合计 + #: 股东权益合计 # - # 负债和股东权益总计 + #: 负债和股东权益总计 - # 保险相关 + #: 保险相关 - # 资产 - # 应收保费 + #: 资产 + #: 应收保费 fi_premiums_receivable = Column(Float) - # 应收分保账款 + #: 应收分保账款 fi_reinsurance_premium_receivable = Column(Float) - # 应收分保合同准备金 + #: 应收分保合同准备金 fi_reinsurance_contract_reserve = Column(Float) - # 保户质押贷款 + #: 保户质押贷款 fi_policy_pledge_loans = Column(Float) - # 发放贷款及垫款 - # 定期存款 + #: 发放贷款及垫款 + #: 定期存款 fi_time_deposit = Column(Float) - # 可供出售金融资产 + #: 可供出售金融资产 # - # 持有至到期投资 + #: 持有至到期投资 # - # 应收款项类投资 + #: 应收款项类投资 # - # 应收账款 + #: 应收账款 # - # 长期股权投资 + #: 长期股权投资 # - # 存出资本保证金 + #: 存出资本保证金 fi_deposit_for_capital_recognizance = Column(Float) - # 投资性房地产 + #: 投资性房地产 # - # 固定资产 + #: 固定资产 # - # 无形资产 + #: 无形资产 # - # 商誉 + #: 商誉 # - # 递延所得税资产 + #: 递延所得税资产 # - # 其他资产 + #: 其他资产 # - # 独立账户资产 + #: 独立账户资产 fi_capital_in_independent_accounts = Column(Float) - # 资产总计 + #: 资产总计 # - # 负债 + #: 负债 # - # 短期借款 + #: 短期借款 # - # 同业及其他金融机构存放款项 + #: 同业及其他金融机构存放款项 # - # 拆入资金 + #: 拆入资金 # - # 以公允价值计量且其变动计入当期损益的金融负债 + #: 以公允价值计量且其变动计入当期损益的金融负债 # - # 衍生金融负债 + #: 衍生金融负债 # - # 卖出回购金融资产款 + #: 卖出回购金融资产款 # - # 吸收存款 + #: 吸收存款 # - # 代理买卖证券款 + #: 代理买卖证券款 # - # 应付账款 + #: 应付账款 # - # 预收账款 + #: 预收账款 fi_advance_from_customers = Column(Float) - # 预收保费 + #: 预收保费 fi_advance_premium = Column(Float) - # 应付手续费及佣金 + #: 应付手续费及佣金 fi_fees_and_commissions_payable = Column(Float) - # 应付分保账款 + #: 应付分保账款 fi_dividend_payable_for_reinsurance = Column(Float) - # 应付职工薪酬 + #: 应付职工薪酬 # - # 应交税费 + #: 应交税费 # - # 应付利息 + #: 应付利息 # - # 预计负债 + #: 预计负债 # - # 应付赔付款 + #: 应付赔付款 fi_claims_payable = Column(Float) - # 应付保单红利 + #: 应付保单红利 fi_policy_holder_dividend_payable = Column(Float) - # 保户储金及投资款 + #: 保户储金及投资款 
fi_policy_holder_deposits_and_investment_funds = Column(Float) - # 保险合同准备金 + #: 保险合同准备金 fi_contract_reserve = Column(Float) - # 长期借款 + #: 长期借款 # - # 应付债券 + #: 应付债券 # - # 递延所得税负债 + #: 递延所得税负债 # - # 其他负债 + #: 其他负债 # - # 独立账户负债 + #: 独立账户负债 fi_independent_liability = Column(Float) - # 负债合计 + #: 负债合计 # - # 所有者权益(或股东权益) + #: 所有者权益(或股东权益) # - # 股本 + #: 股本 # - # 资本公积 + #: 资本公积 # - # 盈余公积 + #: 盈余公积 # - # 一般风险准备 + #: 一般风险准备 # - # 未分配利润 + #: 未分配利润 # - # 归属于母公司股东权益总计 + #: 归属于母公司股东权益总计 # - # 少数股东权益 + #: 少数股东权益 # - # 股东权益合计 + #: 股东权益合计 # - # 负债和股东权益总计 + #: 负债和股东权益总计 class IncomeStatement(FinanceBase, Mixin): - @classmethod def important_cols(cls): - return ['operating_income', 'investment_income', 'total_operating_costs', 'total_profits', 'sales_costs', - 'managing_costs', 'financing_costs'] - - __tablename__ = 'income_statement' + return [ + "operating_income", + "investment_income", + "total_operating_costs", + "total_profits", + "sales_costs", + "managing_costs", + "financing_costs", + ] + + __tablename__ = "income_statement" provider = Column(String(length=32)) code = Column(String(length=32)) @@ -466,502 +478,513 @@ def important_cols(cls): report_period = Column(String(length=32)) report_date = Column(DateTime) - # 营业总收入 + #: 营业总收入 # - # 营业收入 + #: 营业收入 operating_income = Column(Float) - # 营业总成本 + #: 营业总成本 total_operating_costs = Column(Float) - # 营业成本 + #: 营业成本 operating_costs = Column(Float) - # 研发费用 + #: 研发费用 rd_costs = Column(Float) - # 提取保险合同准备金净额 + #: 提取保险合同准备金净额 net_change_in_insurance_contract_reserves = Column(Float) - # 营业税金及附加 + #: 营业税金及附加 business_taxes_and_surcharges = Column(Float) - # 销售费用 + #: 销售费用 sales_costs = Column(Float) - # 管理费用 + #: 管理费用 managing_costs = Column(Float) - # 财务费用 + #: 财务费用 financing_costs = Column(Float) - # 资产减值损失 + #: 资产减值损失 assets_devaluation = Column(Float) - # 其他经营收益 + #: 其他经营收益 # - # 加: 投资收益 + #: 加: 投资收益 investment_income = Column(Float) - # 其中: 对联营企业和合营企业的投资收益 + #: 其中: 对联营企业和合营企业的投资收益 investment_income_from_related_enterprise = 
Column(Float) - # 营业利润 + #: 营业利润 operating_profit = Column(Float) - # 加: 营业外收入 + #: 加: 营业外收入 non_operating_income = Column(Float) - # 减: 营业外支出 + #: 减: 营业外支出 non_operating_costs = Column(Float) - # 其中: 非流动资产处置净损失 + #: 其中: 非流动资产处置净损失 loss_on_disposal_non_current_asset = Column(Float) - # 利润总额 + #: 利润总额 total_profits = Column(Float) - # 减: 所得税费用 + #: 减: 所得税费用 tax_expense = Column(Float) - # 净利润 + #: 净利润 net_profit = Column(Float) - # 其中: 归属于母公司股东的净利润 + #: 其中: 归属于母公司股东的净利润 net_profit_as_parent = Column(Float) - # 少数股东损益 + #: 少数股东损益 net_profit_as_minority_interest = Column(Float) - # 扣除非经常性损益后的净利润 + #: 扣除非经常性损益后的净利润 deducted_net_profit = Column(Float) - # 每股收益 - # 基本每股收益 + #: 每股收益 + #: 基本每股收益 eps = Column(Float) - # 稀释每股收益 + #: 稀释每股收益 diluted_eps = Column(Float) - # 其他综合收益 + #: 其他综合收益 other_comprehensive_income = Column(Float) - # 归属于母公司股东的其他综合收益 + #: 归属于母公司股东的其他综合收益 other_comprehensive_income_as_parent = Column(Float) - # 归属于少数股东的其他综合收益 + #: 归属于少数股东的其他综合收益 other_comprehensive_income_as_minority_interest = Column(Float) - # 综合收益总额 + #: 综合收益总额 total_comprehensive_income = Column(Float) - # 归属于母公司所有者的综合收益总额 + #: 归属于母公司所有者的综合收益总额 total_comprehensive_income_as_parent = Column(Float) - # 归属于少数股东的综合收益总额 + #: 归属于少数股东的综合收益总额 total_comprehensive_income_as_minority_interest = Column(Float) - # 银行相关 - # 利息净收入 + #: 银行相关 + #: 利息净收入 fi_net_interest_income = Column(Float) - # 其中:利息收入 + #: 其中:利息收入 fi_interest_income = Column(Float) - # 利息支出 + #: 利息支出 fi_interest_expenses = Column(Float) - # 手续费及佣金净收入 + #: 手续费及佣金净收入 fi_net_incomes_from_fees_and_commissions = Column(Float) - # 其中:手续费及佣金收入 + #: 其中:手续费及佣金收入 fi_incomes_from_fees_and_commissions = Column(Float) - # 手续费及佣金支出 + #: 手续费及佣金支出 fi_expenses_for_fees_and_commissions = Column(Float) - # 公允价值变动收益 + #: 公允价值变动收益 fi_income_from_fair_value_change = Column(Float) - # 汇兑收益 + #: 汇兑收益 fi_income_from_exchange = Column(Float) - # 其他业务收入 + #: 其他业务收入 fi_other_income = Column(Float) - # 业务及管理费 + #: 业务及管理费 fi_operate_and_manage_expenses = 
Column(Float) - # 保险相关 - # 已赚保费 + #: 保险相关 + #: 已赚保费 fi_net_income_from_premium = Column(Float) - # 其中:保险业务收入 + #: 其中:保险业务收入 fi_income_from_premium = Column(Float) - # 分保费收入 + #: 分保费收入 fi_income_from_reinsurance_premium = Column(Float) - # 减:分出保费 + #: 减:分出保费 fi_reinsurance_premium = Column(Float) - # 提取未到期责任准备金 + #: 提取未到期责任准备金 fi_undue_duty_reserve = Column(Float) - # 银行业务利息净收入 + #: 银行业务利息净收入 fi_net_income_from_bank_interest = Column(Float) - # 其中:银行业务利息收入 + #: 其中:银行业务利息收入 fi_income_from_bank_interest = Column(Float) - # 银行业务利息支出 + #: 银行业务利息支出 fi_expenses_for_bank_interest = Column(Float) - # 非保险业务手续费及佣金净收入 + #: 非保险业务手续费及佣金净收入 fi_net_incomes_from_fees_and_commissions_of_non_insurance = Column(Float) - # 非保险业务手续费及佣金收入 + #: 非保险业务手续费及佣金收入 fi_incomes_from_fees_and_commissions_of_non_insurance = Column(Float) - # 非保险业务手续费及佣金支出 + #: 非保险业务手续费及佣金支出 fi_expenses_for_fees_and_commissions_of_non_insurance = Column(Float) - # 退保金 + #: 退保金 fi_insurance_surrender_costs = Column(Float) - # 赔付支出 + #: 赔付支出 fi_insurance_claims_expenses = Column(Float) - # 减:摊回赔付支出 + #: 减:摊回赔付支出 fi_amortized_insurance_claims_expenses = Column(Float) - # 提取保险责任准备金 + #: 提取保险责任准备金 fi_insurance_duty_reserve = Column(Float) - # 减:摊回保险责任准备金 + #: 减:摊回保险责任准备金 fi_amortized_insurance_duty_reserve = Column(Float) - # 保单红利支出 + #: 保单红利支出 fi_dividend_expenses_to_insured = Column(Float) - # 分保费用 + #: 分保费用 fi_reinsurance_expenses = Column(Float) - # 减:摊回分保费用 + #: 减:摊回分保费用 fi_amortized_reinsurance_expenses = Column(Float) - # 其他业务成本 + #: 其他业务成本 fi_other_op_expenses = Column(Float) - # 券商相关 - # 手续费及佣金净收入 + #: 券商相关 + #: 手续费及佣金净收入 # - # 其中:代理买卖证券业务净收入 + #: 其中:代理买卖证券业务净收入 fi_net_incomes_from_trading_agent = Column(Float) - # 证券承销业务净收入 + #: 证券承销业务净收入 fi_net_incomes_from_underwriting = Column(Float) - # 受托客户资产管理业务净收入 + #: 受托客户资产管理业务净收入 fi_net_incomes_from_customer_asset_management = Column(Float) - # 手续费及佣金净收入其他项目 + #: 手续费及佣金净收入其他项目 fi_fees_from_other = Column(Float) - # 公允价值变动收益 + #: 公允价值变动收益 # - # 其中:可供出售金融资产公允价值变动损益 + #: 
其中:可供出售金融资产公允价值变动损益 fi_income_from_fair_value_change_of_fi_salable = Column(Float) class CashFlowStatement(FinanceBase, Mixin): @classmethod def important_cols(cls): - return ['net_op_cash_flows', 'net_investing_cash_flows', 'net_financing_cash_flows', 'cash'] + return ["net_op_cash_flows", "net_investing_cash_flows", "net_financing_cash_flows", "cash"] - __tablename__ = 'cash_flow_statement' + __tablename__ = "cash_flow_statement" provider = Column(String(length=32)) code = Column(String(length=32)) report_period = Column(String(length=32)) report_date = Column(DateTime) - # 经营活动产生的现金流量 + #: 经营活动产生的现金流量 # - # 销售商品、提供劳务收到的现金 + #: 销售商品、提供劳务收到的现金 cash_from_selling = Column(Float) - # 收到的税费返还 + #: 收到的税费返还 tax_refund = Column(Float) - # 收到其他与经营活动有关的现金 + #: 收到其他与经营活动有关的现金 cash_from_other_op = Column(Float) - # 经营活动现金流入小计 + #: 经营活动现金流入小计 total_op_cash_inflows = Column(Float) - # 购买商品、接受劳务支付的现金 + #: 购买商品、接受劳务支付的现金 cash_to_goods_services = Column(Float) - # 支付给职工以及为职工支付的现金 + #: 支付给职工以及为职工支付的现金 cash_to_employees = Column(Float) - # 支付的各项税费 + #: 支付的各项税费 taxes_and_surcharges = Column(Float) - # 支付其他与经营活动有关的现金 + #: 支付其他与经营活动有关的现金 cash_to_other_related_op = Column(Float) - # 经营活动现金流出小计 + #: 经营活动现金流出小计 total_op_cash_outflows = Column(Float) - # 经营活动产生的现金流量净额 + #: 经营活动产生的现金流量净额 net_op_cash_flows = Column(Float) - # 投资活动产生的现金流量 + #: 投资活动产生的现金流量 - # 收回投资收到的现金 + #: 收回投资收到的现金 cash_from_disposal_of_investments = Column(Float) - # 取得投资收益收到的现金 + #: 取得投资收益收到的现金 cash_from_returns_on_investments = Column(Float) - # 处置固定资产、无形资产和其他长期资产收回的现金净额 + #: 处置固定资产、无形资产和其他长期资产收回的现金净额 cash_from_disposal_fixed_intangible_assets = Column(Float) - # 处置子公司及其他营业单位收到的现金净额 + #: 处置子公司及其他营业单位收到的现金净额 cash_from_disposal_subsidiaries = Column(Float) - # 收到其他与投资活动有关的现金 + #: 收到其他与投资活动有关的现金 cash_from_other_investing = Column(Float) - # 投资活动现金流入小计 + #: 投资活动现金流入小计 total_investing_cash_inflows = Column(Float) - # 购建固定资产、无形资产和其他长期资产支付的现金 + #: 购建固定资产、无形资产和其他长期资产支付的现金 cash_to_acquire_fixed_intangible_assets = Column(Float) 
- # 投资支付的现金 + #: 投资支付的现金 cash_to_investments = Column(Float) - # 取得子公司及其他营业单位支付的现金净额 + #: 取得子公司及其他营业单位支付的现金净额 cash_to_acquire_subsidiaries = Column(Float) - # 支付其他与投资活动有关的现金 + #: 支付其他与投资活动有关的现金 cash_to_other_investing = Column(Float) - # 投资活动现金流出小计 + #: 投资活动现金流出小计 total_investing_cash_outflows = Column(Float) - # 投资活动产生的现金流量净额 + #: 投资活动产生的现金流量净额 net_investing_cash_flows = Column(Float) - # 筹资活动产生的现金流量 + #: 筹资活动产生的现金流量 # - # 吸收投资收到的现金 + #: 吸收投资收到的现金 cash_from_accepting_investment = Column(Float) - # 子公司吸收少数股东投资收到的现金 + #: 子公司吸收少数股东投资收到的现金 cash_from_subsidiaries_accepting_minority_interest = Column(Float) - # 取得借款收到的现金 + #: 取得借款收到的现金 cash_from_borrowings = Column(Float) - # 发行债券收到的现金 + #: 发行债券收到的现金 cash_from_issuing_bonds = Column(Float) - # 收到其他与筹资活动有关的现金 + #: 收到其他与筹资活动有关的现金 cash_from_other_financing = Column(Float) - # 筹资活动现金流入小计 + #: 筹资活动现金流入小计 total_financing_cash_inflows = Column(Float) - # 偿还债务支付的现金 + #: 偿还债务支付的现金 cash_to_repay_borrowings = Column(Float) - # 分配股利、利润或偿付利息支付的现金 + #: 分配股利、利润或偿付利息支付的现金 cash_to_pay_interest_dividend = Column(Float) - # 子公司支付给少数股东的股利、利润 + #: 子公司支付给少数股东的股利、利润 cash_to_pay_subsidiaries_minority_interest = Column(Float) - # 支付其他与筹资活动有关的现金 + #: 支付其他与筹资活动有关的现金 cash_to_other_financing = Column(Float) - # 筹资活动现金流出小计 + #: 筹资活动现金流出小计 total_financing_cash_outflows = Column(Float) - # 筹资活动产生的现金流量净额 + #: 筹资活动产生的现金流量净额 net_financing_cash_flows = Column(Float) - # 汇率变动对现金及现金等价物的影响 + #: 汇率变动对现金及现金等价物的影响 foreign_exchange_rate_effect = Column(Float) - # 现金及现金等价物净增加额 + #: 现金及现金等价物净增加额 net_cash_increase = Column(Float) - # 加: 期初现金及现金等价物余额 + #: 加: 期初现金及现金等价物余额 cash_at_beginning = Column(Float) - # 期末现金及现金等价物余额 + #: 期末现金及现金等价物余额 cash = Column(Float) - # 银行相关 - # 客户存款和同业及其他金融机构存放款项净增加额 + #: 银行相关 + #: 客户存款和同业及其他金融机构存放款项净增加额 fi_deposit_increase = Column(Float) - # 向中央银行借款净增加额 + #: 向中央银行借款净增加额 fi_borrow_from_central_bank_increase = Column(Float) - # 存放中央银行和同业款项及其他金融机构净减少额 + #: 存放中央银行和同业款项及其他金融机构净减少额 fi_deposit_in_others_decrease = Column(Float) - # 
拆入资金及卖出回购金融资产款净增加额 + #: 拆入资金及卖出回购金融资产款净增加额 fi_borrowing_and_sell_repurchase_increase = Column(Float) - # 其中:卖出回购金融资产款净增加额 + #: 其中:卖出回购金融资产款净增加额 fi_sell_repurchase_increase = Column(Float) - # 拆出资金及买入返售金融资产净减少额 + #: 拆出资金及买入返售金融资产净减少额 fi_lending_and_buy_repurchase_decrease = Column(Float) - # 其中:拆出资金净减少额 + #: 其中:拆出资金净减少额 fi_lending_decrease = Column(Float) - # 买入返售金融资产净减少额 + #: 买入返售金融资产净减少额 fi_buy_repurchase_decrease = Column(Float) - # 收取的利息、手续费及佣金的现金 + #: 收取的利息、手续费及佣金的现金 fi_cash_from_interest_commission = Column(Float) - # 客户贷款及垫款净增加额 + #: 客户贷款及垫款净增加额 fi_loan_advance_increase = Column(Float) - # 存放中央银行和同业及其他金融机构款项净增加额 + #: 存放中央银行和同业及其他金融机构款项净增加额 fi_deposit_in_others_increase = Column(Float) - # 拆出资金及买入返售金融资产净增加额 + #: 拆出资金及买入返售金融资产净增加额 fi_lending_and_buy_repurchase_increase = Column(Float) - # 其中:拆出资金净增加额 + #: 其中:拆出资金净增加额 fi_lending_increase = Column(Float) - # 拆入资金及卖出回购金融资产款净减少额 + #: 拆入资金及卖出回购金融资产款净减少额 fi_borrowing_and_sell_repurchase_decrease = Column(Float) - # 其中:拆入资金净减少额 + #: 其中:拆入资金净减少额 fi_borrowing_decrease = Column(Float) - # 卖出回购金融资产净减少额 + #: 卖出回购金融资产净减少额 fi_sell_repurchase_decrease = Column(Float) - # 支付利息、手续费及佣金的现金 + #: 支付利息、手续费及佣金的现金 fi_cash_to_interest_commission = Column(Float) - # 应收账款净增加额 + #: 应收账款净增加额 fi_account_receivable_increase = Column(Float) - # 偿付债券利息支付的现金 + #: 偿付债券利息支付的现金 fi_cash_to_pay_interest = Column(Float) - # 保险相关 - # 收到原保险合同保费取得的现金 + #: 保险相关 + #: 收到原保险合同保费取得的现金 fi_cash_from_premium_of_original = Column(Float) - # 保户储金及投资款净增加额 + #: 保户储金及投资款净增加额 fi_insured_deposit_increase = Column(Float) - # 银行及证券业务卖出回购资金净增加额 + #: 银行及证券业务卖出回购资金净增加额 fi_bank_broker_sell_repurchase_increase = Column(Float) - # 银行及证券业务买入返售资金净减少额 + #: 银行及证券业务买入返售资金净减少额 fi_bank_broker_buy_repurchase_decrease = Column(Float) - # 支付原保险合同赔付等款项的现金 + #: 支付原保险合同赔付等款项的现金 fi_cash_to_insurance_claim = Column(Float) - # 支付再保险业务现金净额 + #: 支付再保险业务现金净额 fi_cash_to_reinsurance = Column(Float) - # 银行业务及证券业务拆借资金净减少额 + #: 银行业务及证券业务拆借资金净减少额 fi_lending_decrease = Column(Float) - # 
银行业务及证券业务卖出回购资金净减少额 + #: 银行业务及证券业务卖出回购资金净减少额 fi_bank_broker_sell_repurchase_decrease = Column(Float) - # 支付保单红利的现金 + #: 支付保单红利的现金 fi_cash_to_dividends = Column(Float) - # 保户质押贷款净增加额 + #: 保户质押贷款净增加额 fi_insured_pledge_loans_increase = Column(Float) - # 收购子公司及其他营业单位支付的现金净额 + #: 收购子公司及其他营业单位支付的现金净额 fi_cash_to_acquire_subsidiaries = Column(Float) - # 处置子公司及其他营业单位流出的现金净额 + #: 处置子公司及其他营业单位流出的现金净额 fi_cash_to_disposal_subsidiaries = Column(Float) - # 支付卖出回购金融资产款现金净额 + #: 支付卖出回购金融资产款现金净额 fi_cash_to_sell_repurchase = Column(Float) - # 券商相关 - # 拆入资金净增加额 + #: 券商相关 + #: 拆入资金净增加额 fi_borrowing_increase = Column(Float) - # 代理买卖证券收到的现金净额 + #: 代理买卖证券收到的现金净额 fi_cash_from_trading_agent = Column(Float) - # 回购业务资金净增加额 + #: 回购业务资金净增加额 fi_cash_from_repurchase_increase = Column(Float) - # 处置交易性金融资产的净减少额 + #: 处置交易性金融资产的净减少额 fi_disposal_trade_asset_decrease = Column(Float) - # 回购业务资金净减少额 + #: 回购业务资金净减少额 fi_repurchase_decrease = Column(Float) - # 代理买卖证券支付的现金净额(净减少额) + #: 代理买卖证券支付的现金净额(净减少额) fi_cash_to_agent_trade = Column(Float) -# 主要财务指标 +#: 主要财务指标 + class FinanceFactor(FinanceBase, Mixin): @classmethod def important_cols(cls): - return ['basic_eps', 'total_op_income', 'net_profit', 'op_income_growth_yoy', 'net_profit_growth_yoy', 'roe', - 'rota', 'gross_profit_margin', 'net_margin'] - - __tablename__ = 'finance_factor' + return [ + "basic_eps", + "total_op_income", + "net_profit", + "op_income_growth_yoy", + "net_profit_growth_yoy", + "roe", + "rota", + "gross_profit_margin", + "net_margin", + ] + + __tablename__ = "finance_factor" provider = Column(String(length=32)) code = Column(String(length=32)) report_period = Column(String(length=32)) report_date = Column(DateTime) - # 每股指标 + #: 每股指标 # - # 基本每股收益(元) + #: 基本每股收益(元) basic_eps = Column(Float) - # 扣非每股收益(元) + #: 扣非每股收益(元) deducted_eps = Column(Float) - # 稀释每股收益(元) + #: 稀释每股收益(元) diluted_eps = Column(Float) - # 每股净资产(元) + #: 每股净资产(元) bps = Column(Float) - # 每股资本公积(元) + #: 每股资本公积(元) capital_reserve_ps = Column(Float) - # 每股未分配利润(元) + #: 
每股未分配利润(元) undistributed_profit_ps = Column(Float) - # 每股经营现金流(元) + #: 每股经营现金流(元) op_cash_flow_ps = Column(Float) - # 成长能力指标 + #: 成长能力指标 # - # 营业总收入(元) + #: 营业总收入(元) total_op_income = Column(Float) - # 毛利润(元) + #: 毛利润(元) gross_profit = Column(Float) - # 归属净利润(元) + #: 归属净利润(元) net_profit = Column(Float) - # 扣非净利润(元) + #: 扣非净利润(元) deducted_net_profit = Column(Float) - # 营业总收入同比增长 + #: 营业总收入同比增长 op_income_growth_yoy = Column(Float) - # 归属净利润同比增长 + #: 归属净利润同比增长 net_profit_growth_yoy = Column(Float) - # 扣非净利润同比增长 + #: 扣非净利润同比增长 deducted_net_profit_growth_yoy = Column(Float) - # 营业总收入滚动环比增长 + #: 营业总收入滚动环比增长 op_income_growth_qoq = Column(Float) - # 归属净利润滚动环比增长 + #: 归属净利润滚动环比增长 net_profit_growth_qoq = Column(Float) - # 扣非净利润滚动环比增长 + #: 扣非净利润滚动环比增长 deducted_net_profit_growth_qoq = Column(Float) - # 盈利能力指标 + #: 盈利能力指标 # - # 净资产收益率(加权) + #: 净资产收益率(加权) roe = Column(Float) - # 净资产收益率(扣非/加权) + #: 净资产收益率(扣非/加权) deducted_roe = Column(Float) - # 总资产收益率(加权) + #: 总资产收益率(加权) rota = Column(Float) - # 毛利率 + #: 毛利率 gross_profit_margin = Column(Float) - # 净利率 + #: 净利率 net_margin = Column(Float) - # 收益质量指标 + #: 收益质量指标 # - # 预收账款/营业收入 + #: 预收账款/营业收入 advance_receipts_per_op_income = Column(Float) - # 销售净现金流/营业收入 + #: 销售净现金流/营业收入 sales_net_cash_flow_per_op_income = Column(Float) - # 经营净现金流/营业收入 + #: 经营净现金流/营业收入 op_net_cash_flow_per_op_income = Column(Float) - # 实际税率 + #: 实际税率 actual_tax_rate = Column(Float) - # 财务风险指标 + #: 财务风险指标 # - # 流动比率 + #: 流动比率 current_ratio = Column(Float) - # 速动比率 + #: 速动比率 quick_ratio = Column(Float) - # 现金流量比率 + #: 现金流量比率 cash_flow_ratio = Column(Float) - # 资产负债率 + #: 资产负债率 debt_asset_ratio = Column(Float) - # 权益乘数 + #: 权益乘数 em = Column(Float) - # 产权比率 + #: 产权比率 equity_ratio = Column(Float) - # 营运能力指标(一般企业) + #: 营运能力指标(一般企业) # - # 总资产周转天数(天) + #: 总资产周转天数(天) total_assets_turnover_days = Column(Integer) - # 存货周转天数(天) + #: 存货周转天数(天) inventory_turnover_days = Column(Integer) - # 应收账款周转天数(天) + #: 应收账款周转天数(天) receivables_turnover_days = Column(Integer) - # 总资产周转率(次) + #: 
总资产周转率(次) total_assets_turnover = Column(Float) - # 存货周转率(次) + #: 存货周转率(次) inventory_turnover = Column(Float) - # 应收账款周转率(次) + #: 应收账款周转率(次) receivables_turnover = Column(Float) - # 专项指标(银行) + #: 专项指标(银行) # - # 存款总额 + #: 存款总额 fi_total_deposit = Column(Float) - # 贷款总额 + #: 贷款总额 fi_total_loan = Column(Float) - # 存贷款比例 + #: 存贷款比例 fi_loan_deposit_ratio = Column(Float) - # 资本充足率 + #: 资本充足率 fi_capital_adequacy_ratio = Column(Float) - # 核心资本充足率 + #: 核心资本充足率 fi_core_capital_adequacy_ratio = Column(Float) - # 不良贷款率 + #: 不良贷款率 fi_npl_ratio = Column(Float) - # 不良贷款拨备覆盖率 + #: 不良贷款拨备覆盖率 fi_npl_provision_coverage = Column(Float) - # 资本净额 + #: 资本净额 fi_net_capital = Column(Float) - # 专项指标(保险) + #: 专项指标(保险) # - # 总投资收益率 + #: 总投资收益率 insurance_roi = Column(Float) - # 净投资收益率 + #: 净投资收益率 insurance_net_investment_yield = Column(Float) - # 已赚保费 + #: 已赚保费 insurance_earned_premium = Column(Float) - # 赔付支出 + #: 赔付支出 insurance_payout = Column(Float) - # 退保率 + #: 退保率 insurance_surrender_rate = Column(Float) - # 偿付能力充足率 + #: 偿付能力充足率 insurance_solvency_adequacy_ratio = Column(Float) - # 专项指标(券商) + #: 专项指标(券商) # - # 净资本 + #: 净资本 broker_net_capital = Column(Float) - # 净资产 + #: 净资产 broker_net_assets = Column(Float) - # 净资本/净资产 + #: 净资本/净资产 broker_net_capital_assets_ratio = Column(Float) - # 自营固定收益类证券规模/净资本 + #: 自营固定收益类证券规模/净资本 broker_self_operated_fixed_income_securities_net_capital_ratio = Column(Float) -register_schema(providers=['eastmoney'], db_name='finance', schema_base=FinanceBase, entity_type='stock') +register_schema(providers=["eastmoney"], db_name="finance", schema_base=FinanceBase, entity_type="stock") + # the __all__ is generated -__all__ = ['BalanceSheet', 'IncomeStatement', 'CashFlowStatement', 'FinanceFactor'] \ No newline at end of file +__all__ = ["BalanceSheet", "IncomeStatement", "CashFlowStatement", "FinanceFactor"] diff --git a/src/zvt/domain/fundamental/trading.py b/src/zvt/domain/fundamental/trading.py new file mode 100644 index 00000000..94eff5f0 --- /dev/null +++ 
b/src/zvt/domain/fundamental/trading.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import Column, String, Float +from sqlalchemy.orm import declarative_base + +from zvt.contract import Mixin +from zvt.contract.register import register_schema + +TradingBase = declarative_base() + + +class ManagerTrading(TradingBase, Mixin): + __tablename__ = "manager_trading" + + provider = Column(String(length=32)) + code = Column(String(length=32)) + #: 日期 变动人 变动数量(股) 交易均价(元) 结存股票(股) 交易方式 董监高管 高管职位 与高管关系 + #: 2017-08-11 韦春 200 9.16 -- 竞价交易 刘韬 高管 兄弟姐妹 + + #: 变动人 + trading_person = Column(String(length=32)) + #: 变动数量 + volume = Column(Float) + #: 交易均价 + price = Column(Float) + #: 结存股票 + holding = Column(Float) + #: 交易方式 + trading_way = Column(String(length=32)) + #: 董监高管 + manager = Column(String(length=32)) + #: 高管职位 + manager_position = Column(String(length=32)) + #: 与高管关系 + relationship_with_manager = Column(String(length=32)) + + +class HolderTrading(TradingBase, Mixin): + __tablename__ = "holder_trading" + + provider = Column(String(length=32)) + code = Column(String(length=32)) + + #: 股东名称 + holder_name = Column(String(length=32)) + #: 变动数量 + volume = Column(Float) + #: 变动比例 + change_pct = Column(Float) + #: 变动后持股比例 + holding_pct = Column(Float) + + +class BigDealTrading(TradingBase, Mixin): + __tablename__ = "big_deal_trading" + + provider = Column(String(length=32)) + code = Column(String(length=32)) + + #: 成交额 + turnover = Column(Float) + #: 成交价 + price = Column(Float) + #: 卖出营业部 + sell_broker = Column(String(length=128)) + #: 买入营业部 + buy_broker = Column(String(length=128)) + #: 折/溢价率 + compare_rate = Column(Float) + + +class MarginTrading(TradingBase, Mixin): + __tablename__ = "margin_trading" + code = Column(String(length=32)) + + #: 融资余额(元) + fin_value = Column(Float) + #: 融资买入额(元) + fin_buy_value = Column(Float) + #: 融资偿还额(元) + fin_refund_value = Column(Float) + #: 融券余量(股) + sec_value = Column(Float) + #: 融券卖出量(股) + sec_sell_value = Column(Float) + #: 
融券偿还量(股) + sec_refund_value = Column(Float) + #: 融资融券余额(元) + fin_sec_value = Column(Float) + + +class DragonAndTiger(TradingBase, Mixin): + __tablename__ = "dragon_and_tiger" + + code = Column(String(length=32)) + name = Column(String(length=32)) + + #: 异动原因 + reason = Column(String(length=128)) + #: 成交额 + turnover = Column(Float) + #: 涨幅 + change_pct = Column(Float) + #: 净买入 + net_in = Column(Float) + + #: 买入营业部 + dep1 = Column(String(length=128)) + dep1_in = Column(Float) + dep1_out = Column(Float) + dep1_rate = Column(Float) + + dep2 = Column(String(length=128)) + dep2_in = Column(Float) + dep2_out = Column(Float) + dep2_rate = Column(Float) + + dep3 = Column(String(length=128)) + dep3_in = Column(Float) + dep3_out = Column(Float) + dep3_rate = Column(Float) + + dep4 = Column(String(length=128)) + dep4_in = Column(Float) + dep4_out = Column(Float) + dep4_rate = Column(Float) + + dep5 = Column(String(length=128)) + dep5_in = Column(Float) + dep5_out = Column(Float) + dep5_rate = Column(Float) + + #: 卖出营业部 + dep_1 = Column(String(length=128)) + dep_1_in = Column(Float) + dep_1_out = Column(Float) + dep_1_rate = Column(Float) + + dep_2 = Column(String(length=128)) + dep_2_in = Column(Float) + dep_2_out = Column(Float) + dep_2_rate = Column(Float) + + dep_3 = Column(String(length=128)) + dep_3_in = Column(Float) + dep_3_out = Column(Float) + dep_3_rate = Column(Float) + + dep_4 = Column(String(length=128)) + dep_4_in = Column(Float) + dep_4_out = Column(Float) + dep_4_rate = Column(Float) + + dep_5 = Column(String(length=128)) + dep_5_in = Column(Float) + dep_5_out = Column(Float) + dep_5_rate = Column(Float) + + +register_schema( + providers=["em", "eastmoney", "joinquant"], db_name="trading", schema_base=TradingBase, entity_type="stock" +) + + +# the __all__ is generated +__all__ = ["ManagerTrading", "HolderTrading", "BigDealTrading", "MarginTrading", "DragonAndTiger"] diff --git a/zvt/domain/fundamental/valuation.py b/src/zvt/domain/fundamental/valuation.py 
similarity index 60% rename from zvt/domain/fundamental/valuation.py rename to src/zvt/domain/fundamental/valuation.py index 96541168..594ace7e 100644 --- a/zvt/domain/fundamental/valuation.py +++ b/src/zvt/domain/fundamental/valuation.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- from sqlalchemy import Column, String, Float -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract import Mixin from zvt.contract.register import register_schema @@ -9,60 +9,61 @@ class StockValuation(ValuationBase, Mixin): - __tablename__ = 'stock_valuation' + __tablename__ = "stock_valuation" code = Column(String(length=32)) name = Column(String(length=32)) - # 总股本(股) + #: 总股本(股) capitalization = Column(Float) - # 公司已发行的普通股股份总数(包含A股,B股和H股的总股本) + #: 公司已发行的普通股股份总数(包含A股,B股和H股的总股本) circulating_cap = Column(Float) - # 市值 + #: 市值 market_cap = Column(Float) - # 流通市值 + #: 流通市值 circulating_market_cap = Column(Float) - # 换手率 + #: 换手率 turnover_ratio = Column(Float) - # 静态pe + #: 静态pe pe = Column(Float) - # 动态pe + #: 动态pe pe_ttm = Column(Float) - # 市净率 + #: 市净率 pb = Column(Float) - # 市销率 + #: 市销率 ps = Column(Float) - # 市现率 + #: 市现率 pcf = Column(Float) class EtfValuation(ValuationBase, Mixin): - __tablename__ = 'etf_valuation' + __tablename__ = "etf_valuation" code = Column(String(length=32)) name = Column(String(length=32)) - # 静态pe + #: 静态pe pe = Column(Float) - # 加权 + #: 加权 pe1 = Column(Float) - # 动态pe + #: 动态pe pe_ttm = Column(Float) - # 加权 + #: 加权 pe_ttm1 = Column(Float) - # 市净率 + #: 市净率 pb = Column(Float) - # 加权 + #: 加权 pb1 = Column(Float) - # 市销率 + #: 市销率 ps = Column(Float) - # 加权 + #: 加权 ps1 = Column(Float) - # 市现率 + #: 市现率 pcf = Column(Float) - # 加权 + #: 加权 pcf1 = Column(Float) -register_schema(providers=['joinquant'], db_name='valuation', schema_base=ValuationBase, entity_type='stock') +register_schema(providers=["joinquant"], db_name="valuation", schema_base=ValuationBase, entity_type="stock") + # the __all__ is generated 
-__all__ = ['StockValuation', 'EtfValuation'] \ No newline at end of file +__all__ = ["StockValuation", "EtfValuation"] diff --git a/src/zvt/domain/macro/__init__.py b/src/zvt/domain/macro/__init__.py new file mode 100644 index 00000000..4bd907b6 --- /dev/null +++ b/src/zvt/domain/macro/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule monetary +from .monetary import * +from .monetary import __all__ as _monetary_all + +__all__ += _monetary_all + +# import all from submodule macro +from .macro import * +from .macro import __all__ as _macro_all + +__all__ += _macro_all diff --git a/src/zvt/domain/macro/macro.py b/src/zvt/domain/macro/macro.py new file mode 100644 index 00000000..1a2a7ff2 --- /dev/null +++ b/src/zvt/domain/macro/macro.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import Column, String, Float, BIGINT +from sqlalchemy.orm import declarative_base + +from zvt.contract import Mixin +from zvt.contract.register import register_schema + +MacroBase = declarative_base() + + +class Economy(MacroBase, Mixin): + # https://datatopics.worldbank.org/world-development-indicators//themes/economy.html + __tablename__ = "economy" + + code = Column(String(length=32)) + name = Column(String(length=32)) + population = Column(BIGINT) + + gdp = Column(Float) + gdp_per_capita = Column(Float) + gdp_per_employed = Column(Float) + gdp_growth = Column(Float) + agriculture_growth = Column(Float) + industry_growth = Column(Float) + manufacturing_growth = Column(Float) + service_growth = Column(Float) + consumption_growth = Column(Float) + capital_growth = Column(Float) + exports_growth = Column(Float) + imports_growth = Column(Float) + + gni = Column(Float) + gni_per_capita = Column(Float) + + gross_saving = Column(Float) + cpi = Column(Float) + 
unemployment_rate = Column(Float) + fdi_of_gdp = Column(Float) + + +register_schema(providers=["wb"], db_name="macro", schema_base=MacroBase) + + +# the __all__ is generated +__all__ = ["Economy"] diff --git a/src/zvt/domain/macro/monetary.py b/src/zvt/domain/macro/monetary.py new file mode 100644 index 00000000..582c9d37 --- /dev/null +++ b/src/zvt/domain/macro/monetary.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import Column, String, Float +from sqlalchemy.orm import declarative_base + +from zvt.contract import Mixin +from zvt.contract.register import register_schema + +MonetaryBase = declarative_base() + + +class TreasuryYield(MonetaryBase, Mixin): + __tablename__ = "treasury_yield" + + code = Column(String(length=32)) + + # 2年期 + yield_2 = Column(Float) + # 5年期 + yield_5 = Column(Float) + # 10年期 + yield_10 = Column(Float) + # 30年期 + yield_30 = Column(Float) + + +register_schema(providers=["em"], db_name="monetary", schema_base=MonetaryBase) + + +# the __all__ is generated +__all__ = ["TreasuryYield"] diff --git a/src/zvt/domain/meta/__init__.py b/src/zvt/domain/meta/__init__.py new file mode 100644 index 00000000..0c4b1f52 --- /dev/null +++ b/src/zvt/domain/meta/__init__.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*-# + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule stockus_meta +from .stockus_meta import * +from .stockus_meta import __all__ as _stockus_meta_all + +__all__ += _stockus_meta_all + +# import all from submodule stockhk_meta +from .stockhk_meta import * +from .stockhk_meta import __all__ as _stockhk_meta_all + +__all__ += _stockhk_meta_all + +# import all from submodule indexus_meta +from .indexus_meta import * +from .indexus_meta import __all__ as _indexus_meta_all + +__all__ += _indexus_meta_all + +# import all from submodule country_meta +from .country_meta import * +from 
.country_meta import __all__ as _country_meta_all + +__all__ += _country_meta_all + +# import all from submodule cbond_meta +from .cbond_meta import * +from .cbond_meta import __all__ as _cbond_meta_all + +__all__ += _cbond_meta_all + +# import all from submodule index_meta +from .index_meta import * +from .index_meta import __all__ as _index_meta_all + +__all__ += _index_meta_all + +# import all from submodule future_meta +from .future_meta import * +from .future_meta import __all__ as _future_meta_all + +__all__ += _future_meta_all + +# import all from submodule etf_meta +from .etf_meta import * +from .etf_meta import __all__ as _etf_meta_all + +__all__ += _etf_meta_all + +# import all from submodule currency_meta +from .currency_meta import * +from .currency_meta import __all__ as _currency_meta_all + +__all__ += _currency_meta_all + +# import all from submodule stock_meta +from .stock_meta import * +from .stock_meta import __all__ as _stock_meta_all + +__all__ += _stock_meta_all + +# import all from submodule block_meta +from .block_meta import * +from .block_meta import __all__ as _block_meta_all + +__all__ += _block_meta_all + +# import all from submodule fund_meta +from .fund_meta import * +from .fund_meta import __all__ as _fund_meta_all + +__all__ += _fund_meta_all diff --git a/src/zvt/domain/meta/block_meta.py b/src/zvt/domain/meta/block_meta.py new file mode 100644 index 00000000..3a1b8a8a --- /dev/null +++ b/src/zvt/domain/meta/block_meta.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- + +from sqlalchemy import Column, String +from sqlalchemy.orm import declarative_base + +from zvt.contract import Portfolio, PortfolioStock +from zvt.contract.register import register_schema, register_entity + +BlockMetaBase = declarative_base() + + +#: 板块 +@register_entity(entity_type="block") +class Block(BlockMetaBase, Portfolio): + __tablename__ = "block" + + #: 板块类型,行业(industry),概念(concept) + category = Column(String(length=64)) + + +class BlockStock(BlockMetaBase, 
PortfolioStock): + __tablename__ = "block_stock" + + +register_schema(providers=["em", "eastmoney", "sina"], db_name="block_meta", schema_base=BlockMetaBase) + + +# the __all__ is generated +__all__ = ["Block", "BlockStock"] diff --git a/src/zvt/domain/meta/cbond_meta.py b/src/zvt/domain/meta/cbond_meta.py new file mode 100644 index 00000000..9718bd4e --- /dev/null +++ b/src/zvt/domain/meta/cbond_meta.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- + +from sqlalchemy.orm import declarative_base + +from zvt.contract import TradableEntity +from zvt.contract.register import register_schema, register_entity + +CBondBase = declarative_base() + + +#: 美股 +@register_entity(entity_type="cbond") +class CBond(CBondBase, TradableEntity): + __tablename__ = "cbond" + + +register_schema(providers=["em"], db_name="cbond_meta", schema_base=CBondBase) + + +# the __all__ is generated +__all__ = ["CBond"] diff --git a/src/zvt/domain/meta/country_meta.py b/src/zvt/domain/meta/country_meta.py new file mode 100644 index 00000000..dbd3a9f7 --- /dev/null +++ b/src/zvt/domain/meta/country_meta.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- + +from sqlalchemy import Column, String, Float +from sqlalchemy.orm import declarative_base + +from zvt.contract.register import register_schema, register_entity +from zvt.contract.schema import TradableEntity + +CountryMetaBase = declarative_base() + + +@register_entity(entity_type="country") +class Country(CountryMetaBase, TradableEntity): + __tablename__ = "country" + + #: 区域 + #: region + region = Column(String(length=128)) + #: 首都 + #: capital city + capital_city = Column(String(length=128)) + #: 收入水平 + #: income level + income_level = Column(String(length=64)) + #: 贷款类型 + #: lending type + lending_type = Column(String(length=64)) + #: 经度 + #: longitude + longitude = Column(Float) + #: 纬度 + #: latitude + latitude = Column(Float) + + +register_schema(providers=["wb"], db_name="country_meta", schema_base=CountryMetaBase) + + +# the __all__ is generated 
+__all__ = ["Country"] diff --git a/src/zvt/domain/meta/currency_meta.py b/src/zvt/domain/meta/currency_meta.py new file mode 100644 index 00000000..94d6bdcc --- /dev/null +++ b/src/zvt/domain/meta/currency_meta.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- + +from sqlalchemy.orm import declarative_base + +from zvt.contract.register import register_schema, register_entity +from zvt.contract.schema import TradableEntity + +CurrencyMetaBase = declarative_base() + + +@register_entity(entity_type="currency") +class Currency(CurrencyMetaBase, TradableEntity): + __tablename__ = "currency" + + +register_schema(providers=["em"], db_name="currency_meta", schema_base=CurrencyMetaBase) + + +# the __all__ is generated +__all__ = ["Currency"] diff --git a/src/zvt/domain/meta/etf_meta.py b/src/zvt/domain/meta/etf_meta.py new file mode 100644 index 00000000..66d02c0b --- /dev/null +++ b/src/zvt/domain/meta/etf_meta.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- + +from sqlalchemy import Column, String +from sqlalchemy.orm import declarative_base + +from zvt.contract import Portfolio, PortfolioStockHistory +from zvt.contract.register import register_schema, register_entity +from zvt.utils.time_utils import now_pd_timestamp + +EtfMetaBase = declarative_base() + + +#: etf +@register_entity(entity_type="etf") +class Etf(EtfMetaBase, Portfolio): + __tablename__ = "etf" + category = Column(String(length=64)) + + @classmethod + def get_stocks(cls, code=None, codes=None, ids=None, timestamp=now_pd_timestamp(), provider=None): + from zvt.api.portfolio import get_etf_stocks + + return get_etf_stocks(code=code, codes=codes, ids=ids, timestamp=timestamp, provider=provider) + + +class EtfStock(EtfMetaBase, PortfolioStockHistory): + __tablename__ = "etf_stock" + + +register_schema(providers=["exchange", "joinquant"], db_name="etf_meta", schema_base=EtfMetaBase) + + +# the __all__ is generated +__all__ = ["Etf", "EtfStock"] diff --git a/src/zvt/domain/meta/fund_meta.py 
b/src/zvt/domain/meta/fund_meta.py new file mode 100644 index 00000000..bc213ceb --- /dev/null +++ b/src/zvt/domain/meta/fund_meta.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import Column, String, Integer +from sqlalchemy.orm import declarative_base + +from zvt.contract import Portfolio, PortfolioStockHistory +from zvt.contract.register import register_entity, register_schema +from zvt.utils.time_utils import now_pd_timestamp + +FundMetaBase = declarative_base() + + +#: 个股 +@register_entity(entity_type="fund") +class Fund(FundMetaBase, Portfolio): + __tablename__ = "fund" + #: 基金管理人 + advisor = Column(String(length=100)) + #: 基金托管人 + trustee = Column(String(length=100)) + + #: 编码 基金运作方式 + #: 401001 开放式基金 + #: 401002 封闭式基金 + #: 401003 QDII + #: 401004 FOF + #: 401005 ETF + #: 401006 LOF + #: 基金运作方式编码 + operate_mode_id = Column(Integer) + #: 基金运作方式 + operate_mode = Column(String(length=32)) + + #: 编码 基金类别 + #: 402001 股票型 + #: 402002 货币型 + #: 402003 债券型 + #: 402004 混合型 + #: 402005 基金型 + #: 402006 贵金属 + #: 402007 封闭式 + #: 投资标的类型编码 + underlying_asset_type_id = Column(Integer) + #: 投资标的类型 + underlying_asset_type = Column(String(length=32)) + + @classmethod + def get_stocks(cls, code=None, codes=None, ids=None, timestamp=now_pd_timestamp(), provider=None): + from zvt.api.portfolio import get_fund_stocks + + return get_fund_stocks(code=code, codes=codes, ids=ids, timestamp=timestamp, provider=provider) + + +class FundStock(FundMetaBase, PortfolioStockHistory): + __tablename__ = "fund_stock" + + +register_schema(providers=["joinquant"], db_name="fund_meta", schema_base=FundMetaBase) + + +# the __all__ is generated +__all__ = ["Fund", "FundStock"] diff --git a/src/zvt/domain/meta/future_meta.py b/src/zvt/domain/meta/future_meta.py new file mode 100644 index 00000000..c77ce9f7 --- /dev/null +++ b/src/zvt/domain/meta/future_meta.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +from sqlalchemy.orm import declarative_base + +from zvt.contract.register import 
register_schema, register_entity +from zvt.contract.schema import TradableEntity + +FutureMetaBase = declarative_base() + + +@register_entity(entity_type="future") +class Future(FutureMetaBase, TradableEntity): + __tablename__ = "future" + + +register_schema(providers=["em"], db_name="future_meta", schema_base=FutureMetaBase) + + +# the __all__ is generated +__all__ = ["Future"] diff --git a/src/zvt/domain/meta/index_meta.py b/src/zvt/domain/meta/index_meta.py new file mode 100644 index 00000000..0cb851d0 --- /dev/null +++ b/src/zvt/domain/meta/index_meta.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- + +from sqlalchemy import Column, String, Float +from sqlalchemy.orm import declarative_base + +from zvt.contract import Portfolio, PortfolioStockHistory +from zvt.contract.register import register_schema, register_entity + +IndexMetaBase = declarative_base() + + +#: 指数 +@register_entity(entity_type="index") +class Index(IndexMetaBase, Portfolio): + __tablename__ = "index" + + #: 发布商 + publisher = Column(String(length=64)) + #: 类别 + #: see IndexCategory + category = Column(String(length=64)) + #: 基准点数 + base_point = Column(Float) + + +class IndexStock(IndexMetaBase, PortfolioStockHistory): + __tablename__ = "index_stock" + + +register_schema(providers=["em", "exchange"], db_name="index_meta", schema_base=IndexMetaBase) + + +# the __all__ is generated +__all__ = ["Index", "IndexStock"] diff --git a/src/zvt/domain/meta/indexus_meta.py b/src/zvt/domain/meta/indexus_meta.py new file mode 100644 index 00000000..377133fc --- /dev/null +++ b/src/zvt/domain/meta/indexus_meta.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- + +from sqlalchemy import Column, String, Float +from sqlalchemy.orm import declarative_base + +from zvt.contract import Portfolio +from zvt.contract.register import register_schema, register_entity + +IndexusMetaBase = declarative_base() + + +#: 美股指数 +@register_entity(entity_type="indexus") +class Indexus(IndexusMetaBase, Portfolio): + __tablename__ = "index" + 
+ #: 发布商 + publisher = Column(String(length=64)) + #: 类别 + #: see IndexCategory + category = Column(String(length=64)) + #: 基准点数 + base_point = Column(Float) + + +register_schema(providers=["em"], db_name="indexus_meta", schema_base=IndexusMetaBase) + + +# the __all__ is generated +__all__ = ["Indexus"] diff --git a/src/zvt/domain/meta/stock_meta.py b/src/zvt/domain/meta/stock_meta.py new file mode 100644 index 00000000..98177442 --- /dev/null +++ b/src/zvt/domain/meta/stock_meta.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- + +from sqlalchemy import Column, String, DateTime, BigInteger, Float +from sqlalchemy.orm import declarative_base + +from zvt.contract import TradableEntity +from zvt.contract.register import register_schema, register_entity + +StockMetaBase = declarative_base() + + +#: 个股 +@register_entity(entity_type="stock") +class Stock(StockMetaBase, TradableEntity): + __tablename__ = "stock" + #: 股东上次更新时间 + holder_modified_date = Column(DateTime) + #: 控股股东 + controlling_holder = Column(String) + #: 实际控制人 + controlling_holder_parent = Column(String) + #: 前十大股东占比 + top_ten_ratio = Column(Float) + + +#: 个股详情 +class StockDetail(StockMetaBase, TradableEntity): + __tablename__ = "stock_detail" + + #: 所属行业 + industries = Column(String) + #: 行业指数 + industry_indices = Column(String) + #: 所属板块 + concept_indices = Column(String) + #: 所属区域 + area_indices = Column(String) + + #: 成立日期 + date_of_establishment = Column(DateTime) + #: 公司简介 + profile = Column(String(length=1024)) + #: 主营业务 + main_business = Column(String(length=512)) + #: 发行量(股) + issues = Column(BigInteger) + #: 发行价格 + price = Column(Float) + #: 募资净额(元) + raising_fund = Column(Float) + #: 发行市盈率 + issue_pe = Column(Float) + #: 网上中签率 + net_winning_rate = Column(Float) + + +register_schema( + providers=["exchange", "joinquant", "eastmoney", "em", "qmt"], db_name="stock_meta", schema_base=StockMetaBase +) + + +# the __all__ is generated +__all__ = ["Stock", "StockDetail"] diff --git 
a/src/zvt/domain/meta/stockhk_meta.py b/src/zvt/domain/meta/stockhk_meta.py new file mode 100644 index 00000000..730ba747 --- /dev/null +++ b/src/zvt/domain/meta/stockhk_meta.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import Column, Boolean +from sqlalchemy.orm import declarative_base + +from zvt.contract import TradableEntity +from zvt.contract.register import register_schema, register_entity + +StockhkMetaBase = declarative_base() + + +#: 港股 +@register_entity(entity_type="stockhk") +class Stockhk(StockhkMetaBase, TradableEntity): + __tablename__ = "stockhk" + #: 是否属于港股通 + south = Column(Boolean) + + @classmethod + def get_trading_t(cls): + """ + 0 means t+0 + 1 means t+1 + + :return: + """ + return 0 + + @classmethod + def get_trading_intervals(cls, include_bidding_time=False): + """ + overwrite it to get the trading intervals of the entity + + :return: list of time intervals, in format [(start,end)] + """ + if include_bidding_time: + return [("09:15", "12:00"), ("13:00", "16:00")] + else: + return [("09:30", "12:00"), ("13:00", "16:00")] + + +register_schema(providers=["em"], db_name="stockhk_meta", schema_base=StockhkMetaBase) + + +# the __all__ is generated +__all__ = ["Stockhk"] diff --git a/src/zvt/domain/meta/stockus_meta.py b/src/zvt/domain/meta/stockus_meta.py new file mode 100644 index 00000000..4d2a8a54 --- /dev/null +++ b/src/zvt/domain/meta/stockus_meta.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- + +from sqlalchemy.orm import declarative_base + +from zvt.contract import TradableEntity +from zvt.contract.register import register_schema, register_entity + +StockusMetaBase = declarative_base() + + +#: 美股 +@register_entity(entity_type="stockus") +class Stockus(StockusMetaBase, TradableEntity): + __tablename__ = "stockus" + + +register_schema(providers=["em"], db_name="stockus_meta", schema_base=StockusMetaBase) + + +# the __all__ is generated +__all__ = ["Stockus"] diff --git a/zvt/domain/misc/__init__.py 
b/src/zvt/domain/misc/__init__.py similarity index 76% rename from zvt/domain/misc/__init__.py rename to src/zvt/domain/misc/__init__.py index 137658a5..2a38f73f 100644 --- a/zvt/domain/misc/__init__.py +++ b/src/zvt/domain/misc/__init__.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- + + # the __all__ is generated __all__ = [] @@ -9,14 +11,23 @@ # import all from submodule overall from .overall import * from .overall import __all__ as _overall_all + __all__ += _overall_all # import all from submodule money_flow from .money_flow import * from .money_flow import __all__ as _money_flow_all + __all__ += _money_flow_all # import all from submodule holder from .holder import * from .holder import __all__ as _holder_all -__all__ += _holder_all \ No newline at end of file + +__all__ += _holder_all + +# import all from submodule stock_news +from .stock_news import * +from .stock_news import __all__ as _stock_news_all + +__all__ += _stock_news_all diff --git a/zvt/domain/misc/holder.py b/src/zvt/domain/misc/holder.py similarity index 64% rename from zvt/domain/misc/holder.py rename to src/zvt/domain/misc/holder.py index 69fe0068..9b8fc213 100644 --- a/zvt/domain/misc/holder.py +++ b/src/zvt/domain/misc/holder.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- from sqlalchemy import Column, String, DateTime, Float -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract import Mixin from zvt.contract.register import register_schema @@ -9,25 +9,25 @@ class HkHolder(HolderBase, Mixin): - __tablename__ = 'hk_holder' - # 股票代码 + __tablename__ = "hk_holder" + #: 股票代码 code = Column(String(length=32)) - # 股票名称 + #: 股票名称 name = Column(String(length=32)) - # 市场通编码 三种类型:310001-沪股通,310002-深股通,310005-港股通 + #: 市场通编码 三种类型:310001-沪股通,310002-深股通,310005-港股通 holder_code = Column(String(length=32)) - # 市场通名称 三种类型:沪股通,深股通,港股通 + #: 市场通名称 三种类型:沪股通,深股通,港股通 holder_name = Column(String(length=32)) - # 持股数量 + #: 持股数量 share_number = Column(Float) - # 
持股比例 + #: 持股比例 share_ratio = Column(Float) class TopTenTradableHolder(HolderBase, Mixin): - __tablename__ = 'top_ten_tradable_holder' + __tablename__ = "top_ten_tradable_holder" provider = Column(String(length=32)) code = Column(String(length=32)) @@ -35,22 +35,22 @@ class TopTenTradableHolder(HolderBase, Mixin): report_period = Column(String(length=32)) report_date = Column(DateTime) - # 股东代码 + #: 股东代码 holder_code = Column(String(length=32)) - # 股东名称 + #: 股东名称 holder_name = Column(String(length=32)) - # 持股数 + #: 持股数 shareholding_numbers = Column(Float) - # 持股比例 + #: 持股比例 shareholding_ratio = Column(Float) - # 变动 + #: 变动 change = Column(Float) - # 变动比例 + #: 变动比例 change_ratio = Column(Float) class TopTenHolder(HolderBase, Mixin): - __tablename__ = 'top_ten_holder' + __tablename__ = "top_ten_holder" provider = Column(String(length=32)) code = Column(String(length=32)) @@ -58,22 +58,22 @@ class TopTenHolder(HolderBase, Mixin): report_period = Column(String(length=32)) report_date = Column(DateTime) - # 股东代码 + #: 股东代码 holder_code = Column(String(length=32)) - # 股东名称 + #: 股东名称 holder_name = Column(String(length=32)) - # 持股数 + #: 持股数 shareholding_numbers = Column(Float) - # 持股比例 + #: 持股比例 shareholding_ratio = Column(Float) - # 变动 + #: 变动 change = Column(Float) - # 变动比例 + #: 变动比例 change_ratio = Column(Float) class InstitutionalInvestorHolder(HolderBase, Mixin): - __tablename__ = 'institutional_investor_holder' + __tablename__ = "institutional_investor_holder" provider = Column(String(length=32)) code = Column(String(length=32)) @@ -81,19 +81,20 @@ class InstitutionalInvestorHolder(HolderBase, Mixin): report_period = Column(String(length=32)) report_date = Column(DateTime) - # 机构类型 + #: 机构类型 institutional_investor_type = Column(String(length=64)) - # 股东代码 + #: 股东代码 holder_code = Column(String(length=32)) - # 股东名称 + #: 股东名称 holder_name = Column(String(length=32)) - # 持股数 + #: 持股数 shareholding_numbers = Column(Float) - # 持股比例 + #: 持股比例 shareholding_ratio = Column(Float) 
-register_schema(providers=['eastmoney', 'joinquant'], db_name='holder', schema_base=HolderBase, entity_type='stock') +register_schema(providers=["eastmoney", "joinquant"], db_name="holder", schema_base=HolderBase, entity_type="stock") + # the __all__ is generated -__all__ = ['HkHolder', 'TopTenTradableHolder', 'TopTenHolder', 'InstitutionalInvestorHolder'] \ No newline at end of file +__all__ = ["HkHolder", "TopTenTradableHolder", "TopTenHolder", "InstitutionalInvestorHolder"] diff --git a/zvt/domain/misc/money_flow.py b/src/zvt/domain/misc/money_flow.py similarity index 73% rename from zvt/domain/misc/money_flow.py rename to src/zvt/domain/misc/money_flow.py index 3a99fdab..e3995c5f 100644 --- a/zvt/domain/misc/money_flow.py +++ b/src/zvt/domain/misc/money_flow.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- from sqlalchemy import Column, String, Float -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract import Mixin from zvt.contract.register import register_schema @@ -8,106 +8,108 @@ MoneyFlowBase = declarative_base() -# 板块资金流向 +#: 板块资金流向 + class BlockMoneyFlow(MoneyFlowBase, Mixin): - __tablename__ = 'block_money_flow' + __tablename__ = "block_money_flow" code = Column(String(length=32)) name = Column(String(length=32)) - # 收盘价 + #: 收盘价 close = Column(Float) change_pct = Column(Float) turnover_rate = Column(Float) - # 净流入 + #: 净流入 net_inflows = Column(Float) - # 净流入率 + #: 净流入率 net_inflow_rate = Column(Float) - # 主力=超大单+大单 + #: 主力=超大单+大单 net_main_inflows = Column(Float) net_main_inflow_rate = Column(Float) - # 超大单 + #: 超大单 net_huge_inflows = Column(Float) net_huge_inflow_rate = Column(Float) - # 大单 + #: 大单 net_big_inflows = Column(Float) net_big_inflow_rate = Column(Float) - # 中单 + #: 中单 net_medium_inflows = Column(Float) net_medium_inflow_rate = Column(Float) - # 小单 + #: 小单 net_small_inflows = Column(Float) net_small_inflow_rate = Column(Float) class StockMoneyFlow(MoneyFlowBase, Mixin): - 
__tablename__ = 'stock_money_flow' + __tablename__ = "stock_money_flow" code = Column(String(length=32)) name = Column(String(length=32)) - # 收盘价 + #: 收盘价 close = Column(Float) change_pct = Column(Float) turnover_rate = Column(Float) - # 净流入 + #: 净流入 net_inflows = Column(Float) - # 净流入率 + #: 净流入率 net_inflow_rate = Column(Float) - # 主力=超大单+大单 + #: 主力=超大单+大单 net_main_inflows = Column(Float) net_main_inflow_rate = Column(Float) - # 超大单 + #: 超大单 net_huge_inflows = Column(Float) net_huge_inflow_rate = Column(Float) - # 大单 + #: 大单 net_big_inflows = Column(Float) net_big_inflow_rate = Column(Float) - # 中单 + #: 中单 net_medium_inflows = Column(Float) net_medium_inflow_rate = Column(Float) - # 小单 + #: 小单 net_small_inflows = Column(Float) net_small_inflow_rate = Column(Float) class IndexMoneyFlow(MoneyFlowBase, Mixin): - __tablename__ = 'index_money_flow' + __tablename__ = "index_money_flow" code = Column(String(length=32)) name = Column(String(length=32)) - # 净流入 + #: 净流入 net_inflows = Column(Float) - # 净流入率 + #: 净流入率 net_inflow_rate = Column(Float) - # 主力=超大单+大单 + #: 主力=超大单+大单 net_main_inflows = Column(Float) net_main_inflow_rate = Column(Float) - # 超大单 + #: 超大单 net_huge_inflows = Column(Float) net_huge_inflow_rate = Column(Float) - # 大单 + #: 大单 net_big_inflows = Column(Float) net_big_inflow_rate = Column(Float) - # 中单 + #: 中单 net_medium_inflows = Column(Float) net_medium_inflow_rate = Column(Float) - # 小单 + #: 小单 net_small_inflows = Column(Float) net_small_inflow_rate = Column(Float) -register_schema(providers=['joinquant', 'sina'], db_name='money_flow', schema_base=MoneyFlowBase, entity_type='stock') +register_schema(providers=["joinquant", "sina"], db_name="money_flow", schema_base=MoneyFlowBase, entity_type="stock") + # the __all__ is generated -__all__ = ['BlockMoneyFlow', 'StockMoneyFlow', 'IndexMoneyFlow'] \ No newline at end of file +__all__ = ["BlockMoneyFlow", "StockMoneyFlow", "IndexMoneyFlow"] diff --git a/zvt/domain/misc/overall.py 
b/src/zvt/domain/misc/overall.py similarity index 70% rename from zvt/domain/misc/overall.py rename to src/zvt/domain/misc/overall.py index b6be9820..48da1737 100644 --- a/zvt/domain/misc/overall.py +++ b/src/zvt/domain/misc/overall.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- from sqlalchemy import Column, String, Float -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract import Mixin from zvt.contract.register import register_schema @@ -8,10 +8,11 @@ OverallBase = declarative_base() -# 市场整体估值 +#: 市场整体估值 + class StockSummary(OverallBase, Mixin): - __tablename__ = 'stock_summary' + __tablename__ = "stock_summary" provider = Column(String(length=32)) code = Column(String(length=32)) @@ -26,32 +27,34 @@ class StockSummary(OverallBase, Mixin): turnover_rate = Column(Float) -# 融资融券概况 +#: 融资融券概况 + class MarginTradingSummary(OverallBase, Mixin): - __tablename__ = 'margin_trading_summary' + __tablename__ = "margin_trading_summary" provider = Column(String(length=32)) code = Column(String(length=32)) name = Column(String(length=32)) - # 融资余额 + #: 融资余额 margin_value = Column(Float) - # 买入额 + #: 买入额 margin_buy = Column(Float) - # 融券余额 + #: 融券余额 short_value = Column(Float) - # 卖出量 + #: 卖出量 short_volume = Column(Float) - # 融资融券余额 + #: 融资融券余额 total_value = Column(Float) -# 北向/南向成交概况 +#: 北向/南向成交概况 + class CrossMarketSummary(OverallBase, Mixin): - __tablename__ = 'cross_market_summary' + __tablename__ = "cross_market_summary" provider = Column(String(length=32)) code = Column(String(length=32)) name = Column(String(length=32)) @@ -64,7 +67,8 @@ class CrossMarketSummary(OverallBase, Mixin): quota_daily_balance = Column(Float) -register_schema(providers=['joinquant', 'exchange'], db_name='overall', schema_base=OverallBase, entity_type='stock') +register_schema(providers=["joinquant", "exchange"], db_name="overall", schema_base=OverallBase, entity_type="stock") + # the __all__ is generated -__all__ = ['StockSummary', 
'MarginTradingSummary', 'CrossMarketSummary'] \ No newline at end of file +__all__ = ["StockSummary", "MarginTradingSummary", "CrossMarketSummary"] diff --git a/src/zvt/domain/misc/stock_news.py b/src/zvt/domain/misc/stock_news.py new file mode 100644 index 00000000..c8333b5e --- /dev/null +++ b/src/zvt/domain/misc/stock_news.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import Column, String, JSON, Boolean, DateTime, Integer +from sqlalchemy.orm import declarative_base + +from zvt.contract import Mixin +from zvt.contract.register import register_schema + +NewsBase = declarative_base() + + +class StockNews(NewsBase, Mixin): + __tablename__ = "stock_news" + + #: 新闻编号 + news_code = Column(String) + #: 新闻地址 + news_url = Column(String) + #: 新闻标题 + news_title = Column(String) + #: 新闻内容 + news_content = Column(String) + #: 新闻解读 + news_analysis = Column(JSON) + #: 用户设置为忽略 + ignore_by_user = Column(Boolean, default=False) + + +class StockHotTopic(NewsBase, Mixin): + __tablename__ = "stock_hot_topic" + + #: 出现时间 + created_timestamp = Column(DateTime) + #: 热度排行 + position = Column(Integer) + #: 相关标的 + entity_ids = Column(JSON) + + #: 新闻编号 + news_code = Column(String) + #: 新闻标题 + news_title = Column(String) + #: 新闻内容 + news_content = Column(String) + #: 新闻解读 + news_analysis = Column(JSON) + + +register_schema(providers=["em"], db_name="stock_news", schema_base=NewsBase, entity_type="stock") + + +# the __all__ is generated +__all__ = ["StockNews", "StockHotTopic"] diff --git a/src/zvt/domain/quotes/__init__.py b/src/zvt/domain/quotes/__init__.py new file mode 100644 index 00000000..dad905cc --- /dev/null +++ b/src/zvt/domain/quotes/__init__.py @@ -0,0 +1,191 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import String, Column, Float, Integer, JSON + +from zvt.contract import Mixin + + +class KdataCommon(Mixin): + provider = Column(String(length=32)) + code = Column(String(length=32)) + name = Column(String(length=32)) + # Enum constraint is not extendable + # 
level = Column(Enum(IntervalLevel, values_callable=enum_value)) + level = Column(String(length=32)) + + # 开盘价 + open = Column(Float) + # 收盘价 + close = Column(Float) + # 最高价 + high = Column(Float) + # 最低价 + low = Column(Float) + # 成交量 + volume = Column(Float) + # 成交金额 + turnover = Column(Float) + # 涨跌幅 + change_pct = Column(Float) + # 换手率 + turnover_rate = Column(Float) + + +class TickCommon(Mixin): + #: UNIX时间戳 + time = Column(Integer) + #: 开盘价 + open = Column(Float) + #: 收盘价/当前价格 + close = Column(Float) + #: 最高价 + high = Column(Float) + #: 最低价 + low = Column(Float) + #: 成交量 + volume = Column(Float) + #: 成交金额 + turnover = Column(Float) + #: 委卖价 + ask_price = Column(Float) + #: 委买价 + bid_price = Column(Float) + #: 委卖量 + ask_vol = Column(JSON) + #: 委买量 + bid_vol = Column(JSON) + #: 成交笔数 + transaction_num = Column(Integer) + + +class BlockKdataCommon(KdataCommon): + pass + + +class IndexKdataCommon(KdataCommon): + pass + + +class IndexusKdataCommon(KdataCommon): + pass + + +class EtfKdataCommon(KdataCommon): + turnover_rate = Column(Float) + + # ETF 累计净值(货币 ETF 为七日年化) + cumulative_net_value = Column(Float) + + +class StockKdataCommon(KdataCommon): + pass + + +class StockusKdataCommon(KdataCommon): + pass + + +class StockhkKdataCommon(KdataCommon): + pass + + +# future common kdata +class FutureKdataCommon(KdataCommon): + #: 持仓量 + interest = Column(Float) + #: 结算价 + settlement = Column(Float) + #: 涨跌幅(按收盘价) + # change_pct = Column(Float) + #: 涨跌幅(按结算价) + change_pct1 = Column(Float) + + +class CurrencyKdataCommon(KdataCommon): + #: 持仓量 + interest = Column(Float) + #: 结算价 + settlement = Column(Float) + #: 涨跌幅(按收盘价) + # change_pct = Column(Float) + #: 涨跌幅(按结算价) + change_pct1 = Column(Float) + + +# the __all__ is generated +__all__ = [ + "KdataCommon", + "TickCommon", + "BlockKdataCommon", + "IndexKdataCommon", + "IndexusKdataCommon", + "EtfKdataCommon", + "StockKdataCommon", + "StockusKdataCommon", + "StockhkKdataCommon", + "FutureKdataCommon", + "CurrencyKdataCommon", +] 
+ +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule trade_day +from .trade_day import * +from .trade_day import __all__ as _trade_day_all + +__all__ += _trade_day_all + +# import all from submodule indexus +from .indexus import * +from .indexus import __all__ as _indexus_all + +__all__ += _indexus_all + +# import all from submodule stockhk +from .stockhk import * +from .stockhk import __all__ as _stockhk_all + +__all__ += _stockhk_all + +# import all from submodule stockus +from .stockus import * +from .stockus import __all__ as _stockus_all + +__all__ += _stockus_all + +# import all from submodule index +from .index import * +from .index import __all__ as _index_all + +__all__ += _index_all + +# import all from submodule etf +from .etf import * +from .etf import __all__ as _etf_all + +__all__ += _etf_all + +# import all from submodule stock +from .stock import * +from .stock import __all__ as _stock_all + +__all__ += _stock_all + +# import all from submodule currency +from .currency import * +from .currency import __all__ as _currency_all + +__all__ += _currency_all + +# import all from submodule future +from .future import * +from .future import __all__ as _future_all + +__all__ += _future_all + +# import all from submodule block +from .block import * +from .block import __all__ as _block_all + +__all__ += _block_all diff --git a/zvt/domain/quotes/block/__init__.py b/src/zvt/domain/quotes/block/__init__.py similarity index 95% rename from zvt/domain/quotes/block/__init__.py rename to src/zvt/domain/quotes/block/__init__.py index 1b03b21d..f2338896 100644 --- a/zvt/domain/quotes/block/__init__.py +++ b/src/zvt/domain/quotes/block/__init__.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it + + # the __all__ is generated __all__ = [] @@ -10,14 +12,17 @@ # import all from submodule block_1d_kdata 
from .block_1d_kdata import * from .block_1d_kdata import __all__ as _block_1d_kdata_all + __all__ += _block_1d_kdata_all # import all from submodule block_1wk_kdata from .block_1wk_kdata import * from .block_1wk_kdata import __all__ as _block_1wk_kdata_all + __all__ += _block_1wk_kdata_all # import all from submodule block_1mon_kdata from .block_1mon_kdata import * from .block_1mon_kdata import __all__ as _block_1mon_kdata_all -__all__ += _block_1mon_kdata_all \ No newline at end of file + +__all__ += _block_1mon_kdata_all diff --git a/zvt/domain/quotes/block/block_1d_kdata.py b/src/zvt/domain/quotes/block/block_1d_kdata.py similarity index 57% rename from zvt/domain/quotes/block/block_1d_kdata.py rename to src/zvt/domain/quotes/block/block_1d_kdata.py index 900c18af..b71d473d 100644 --- a/zvt/domain/quotes/block/block_1d_kdata.py +++ b/src/zvt/domain/quotes/block/block_1d_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import BlockKdataCommon @@ -9,10 +9,11 @@ class Block1dKdata(KdataBase, BlockKdataCommon): - __tablename__ = 'block_1d_kdata' + __tablename__ = "block_1d_kdata" -register_schema(providers=['eastmoney'], db_name='block_1d_kdata', schema_base=KdataBase, entity_type='block') +register_schema(providers=["em"], db_name="block_1d_kdata", schema_base=KdataBase, entity_type="block") + # the __all__ is generated -__all__ = ['Block1dKdata'] \ No newline at end of file +__all__ = ["Block1dKdata"] diff --git a/zvt/domain/quotes/block/block_1mon_kdata.py b/src/zvt/domain/quotes/block/block_1mon_kdata.py similarity index 56% rename from zvt/domain/quotes/block/block_1mon_kdata.py rename to src/zvt/domain/quotes/block/block_1mon_kdata.py index 691e8f16..a9b5f6d7 100644 --- a/zvt/domain/quotes/block/block_1mon_kdata.py 
+++ b/src/zvt/domain/quotes/block/block_1mon_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import BlockKdataCommon @@ -9,10 +9,11 @@ class Block1monKdata(KdataBase, BlockKdataCommon): - __tablename__ = 'block_1mon_kdata' + __tablename__ = "block_1mon_kdata" -register_schema(providers=['eastmoney'], db_name='block_1mon_kdata', schema_base=KdataBase, entity_type='block') +register_schema(providers=["em"], db_name="block_1mon_kdata", schema_base=KdataBase, entity_type="block") + # the __all__ is generated -__all__ = ['Block1monKdata'] \ No newline at end of file +__all__ = ["Block1monKdata"] diff --git a/zvt/domain/quotes/block/block_1wk_kdata.py b/src/zvt/domain/quotes/block/block_1wk_kdata.py similarity index 56% rename from zvt/domain/quotes/block/block_1wk_kdata.py rename to src/zvt/domain/quotes/block/block_1wk_kdata.py index 28ef4478..8ba22873 100644 --- a/zvt/domain/quotes/block/block_1wk_kdata.py +++ b/src/zvt/domain/quotes/block/block_1wk_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import BlockKdataCommon @@ -9,10 +9,11 @@ class Block1wkKdata(KdataBase, BlockKdataCommon): - __tablename__ = 'block_1wk_kdata' + __tablename__ = "block_1wk_kdata" -register_schema(providers=['eastmoney'], db_name='block_1wk_kdata', schema_base=KdataBase, entity_type='block') +register_schema(providers=["em"], db_name="block_1wk_kdata", schema_base=KdataBase, entity_type="block") + # the __all__ is generated -__all__ = ['Block1wkKdata'] \ No newline at end of file +__all__ = 
["Block1wkKdata"] diff --git a/src/zvt/domain/quotes/currency/__init__.py b/src/zvt/domain/quotes/currency/__init__.py new file mode 100644 index 00000000..51b81592 --- /dev/null +++ b/src/zvt/domain/quotes/currency/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule currency_1d_kdata +from .currency_1d_kdata import * +from .currency_1d_kdata import __all__ as _currency_1d_kdata_all + +__all__ += _currency_1d_kdata_all diff --git a/src/zvt/domain/quotes/currency/currency_1d_kdata.py b/src/zvt/domain/quotes/currency/currency_1d_kdata.py new file mode 100644 index 00000000..863e88dc --- /dev/null +++ b/src/zvt/domain/quotes/currency/currency_1d_kdata.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# this file is generated by gen_kdata_schema function, dont't change it +from sqlalchemy.orm import declarative_base + +from zvt.contract.register import register_schema +from zvt.domain.quotes import CurrencyKdataCommon + +KdataBase = declarative_base() + + +class Currency1dKdata(KdataBase, CurrencyKdataCommon): + __tablename__ = "currency_1d_kdata" + + +register_schema(providers=["em"], db_name="currency_1d_kdata", schema_base=KdataBase, entity_type="currency") + + +# the __all__ is generated +__all__ = ["Currency1dKdata"] diff --git a/zvt/domain/quotes/etf/__init__.py b/src/zvt/domain/quotes/etf/__init__.py similarity index 92% rename from zvt/domain/quotes/etf/__init__.py rename to src/zvt/domain/quotes/etf/__init__.py index 895c5ed1..0740f7ed 100644 --- a/zvt/domain/quotes/etf/__init__.py +++ b/src/zvt/domain/quotes/etf/__init__.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it + + # the __all__ is generated __all__ = [] @@ -10,4 +12,5 @@ # import all from submodule etf_1d_kdata from .etf_1d_kdata 
import * from .etf_1d_kdata import __all__ as _etf_1d_kdata_all -__all__ += _etf_1d_kdata_all \ No newline at end of file + +__all__ += _etf_1d_kdata_all diff --git a/zvt/domain/quotes/etf/etf_1d_kdata.py b/src/zvt/domain/quotes/etf/etf_1d_kdata.py similarity index 58% rename from zvt/domain/quotes/etf/etf_1d_kdata.py rename to src/zvt/domain/quotes/etf/etf_1d_kdata.py index 5df30fca..0622e6d4 100644 --- a/zvt/domain/quotes/etf/etf_1d_kdata.py +++ b/src/zvt/domain/quotes/etf/etf_1d_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import EtfKdataCommon @@ -9,10 +9,11 @@ class Etf1dKdata(KdataBase, EtfKdataCommon): - __tablename__ = 'etf_1d_kdata' + __tablename__ = "etf_1d_kdata" -register_schema(providers=['sina'], db_name='etf_1d_kdata', schema_base=KdataBase, entity_type='etf') +register_schema(providers=["sina"], db_name="etf_1d_kdata", schema_base=KdataBase, entity_type="etf") + # the __all__ is generated -__all__ = ['Etf1dKdata'] \ No newline at end of file +__all__ = ["Etf1dKdata"] diff --git a/src/zvt/domain/quotes/future/__init__.py b/src/zvt/domain/quotes/future/__init__.py new file mode 100644 index 00000000..6ac0ca4f --- /dev/null +++ b/src/zvt/domain/quotes/future/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule future_1d_kdata +from .future_1d_kdata import * +from .future_1d_kdata import __all__ as _future_1d_kdata_all + +__all__ += _future_1d_kdata_all diff --git a/src/zvt/domain/quotes/future/future_1d_kdata.py b/src/zvt/domain/quotes/future/future_1d_kdata.py new file mode 100644 index 
00000000..86760a60 --- /dev/null +++ b/src/zvt/domain/quotes/future/future_1d_kdata.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# this file is generated by gen_kdata_schema function, dont't change it +from sqlalchemy.orm import declarative_base + +from zvt.contract.register import register_schema +from zvt.domain.quotes import FutureKdataCommon + +KdataBase = declarative_base() + + +class Future1dKdata(KdataBase, FutureKdataCommon): + __tablename__ = "future_1d_kdata" + + +register_schema(providers=["em"], db_name="future_1d_kdata", schema_base=KdataBase, entity_type="future") + + +# the __all__ is generated +__all__ = ["Future1dKdata"] diff --git a/zvt/domain/quotes/index/__init__.py b/src/zvt/domain/quotes/index/__init__.py similarity index 78% rename from zvt/domain/quotes/index/__init__.py rename to src/zvt/domain/quotes/index/__init__.py index d73e16a1..386bc716 100644 --- a/zvt/domain/quotes/index/__init__.py +++ b/src/zvt/domain/quotes/index/__init__.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it + + # the __all__ is generated __all__ = [] @@ -10,9 +12,15 @@ # import all from submodule index_1d_kdata from .index_1d_kdata import * from .index_1d_kdata import __all__ as _index_1d_kdata_all + __all__ += _index_1d_kdata_all # import all from submodule index_1wk_kdata from .index_1wk_kdata import * from .index_1wk_kdata import __all__ as _index_1wk_kdata_all -__all__ += _index_1wk_kdata_all \ No newline at end of file + +__all__ += _index_1wk_kdata_all + +from .index_1m_kdata import * +from .index_1m_kdata import __all__ as _index_1m_kdata_all +__all__ += _index_1m_kdata_all \ No newline at end of file diff --git a/zvt/domain/quotes/index/index_1d_kdata.py b/src/zvt/domain/quotes/index/index_1d_kdata.py similarity index 56% rename from zvt/domain/quotes/index/index_1d_kdata.py rename to src/zvt/domain/quotes/index/index_1d_kdata.py index adde0a0a..4c056e75 100644 --- 
a/zvt/domain/quotes/index/index_1d_kdata.py +++ b/src/zvt/domain/quotes/index/index_1d_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import IndexKdataCommon @@ -9,10 +9,11 @@ class Index1dKdata(KdataBase, IndexKdataCommon): - __tablename__ = 'index_1d_kdata' + __tablename__ = "index_1d_kdata" -register_schema(providers=['joinquant', 'sina'], db_name='index_1d_kdata', schema_base=KdataBase, entity_type='index') +register_schema(providers=["em", "sina"], db_name="index_1d_kdata", schema_base=KdataBase, entity_type="index") + # the __all__ is generated -__all__ = ['Index1dKdata'] \ No newline at end of file +__all__ = ["Index1dKdata"] diff --git a/src/zvt/domain/quotes/index/index_1m_kdata.py b/src/zvt/domain/quotes/index/index_1m_kdata.py new file mode 100644 index 00000000..705d4bee --- /dev/null +++ b/src/zvt/domain/quotes/index/index_1m_kdata.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# this file is generated by gen_kdata_schema function, dont't change it +from sqlalchemy.orm import declarative_base + +from zvt.contract import TradableEntity +from zvt.contract.register import register_schema +from zvt.domain.quotes import IndexKdataCommon + +KdataBase = declarative_base() + + +class Index1mKdata(KdataBase, IndexKdataCommon, TradableEntity): + __tablename__ = "index_1m_kdata" + + +register_schema(providers=["em", "sina", "qmt"], db_name="index_1m_kdata", schema_base=KdataBase, entity_type="index") + + +# the __all__ is generated +__all__ = ["Index1mKdata"] diff --git a/zvt/domain/quotes/index/index_1wk_kdata.py b/src/zvt/domain/quotes/index/index_1wk_kdata.py similarity index 56% rename from zvt/domain/quotes/index/index_1wk_kdata.py rename to src/zvt/domain/quotes/index/index_1wk_kdata.py index 78722708..42819741 
100644 --- a/zvt/domain/quotes/index/index_1wk_kdata.py +++ b/src/zvt/domain/quotes/index/index_1wk_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import IndexKdataCommon @@ -9,10 +9,11 @@ class Index1wkKdata(KdataBase, IndexKdataCommon): - __tablename__ = 'index_1wk_kdata' + __tablename__ = "index_1wk_kdata" -register_schema(providers=['joinquant', 'sina'], db_name='index_1wk_kdata', schema_base=KdataBase, entity_type='index') +register_schema(providers=["em", "sina"], db_name="index_1wk_kdata", schema_base=KdataBase, entity_type="index") + # the __all__ is generated -__all__ = ['Index1wkKdata'] \ No newline at end of file +__all__ = ["Index1wkKdata"] diff --git a/src/zvt/domain/quotes/indexus/__init__.py b/src/zvt/domain/quotes/indexus/__init__.py new file mode 100644 index 00000000..5058652e --- /dev/null +++ b/src/zvt/domain/quotes/indexus/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule indexus_1d_kdata +from .indexus_1d_kdata import * +from .indexus_1d_kdata import __all__ as _indexus_1d_kdata_all + +__all__ += _indexus_1d_kdata_all diff --git a/src/zvt/domain/quotes/indexus/indexus_1d_kdata.py b/src/zvt/domain/quotes/indexus/indexus_1d_kdata.py new file mode 100644 index 00000000..9ecef7fb --- /dev/null +++ b/src/zvt/domain/quotes/indexus/indexus_1d_kdata.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# this file is generated by gen_kdata_schema function, dont't change it +from sqlalchemy.orm import declarative_base + +from zvt.contract.register import register_schema +from zvt.domain.quotes import 
IndexusKdataCommon + +KdataBase = declarative_base() + + +class Indexus1dKdata(KdataBase, IndexusKdataCommon): + __tablename__ = "indexus_1d_kdata" + + +register_schema(providers=["em"], db_name="indexus_1d_kdata", schema_base=KdataBase, entity_type="indexus") + + +# the __all__ is generated +__all__ = ["Indexus1dKdata"] diff --git a/zvt/domain/quotes/stock/__init__.py b/src/zvt/domain/quotes/stock/__init__.py similarity index 94% rename from zvt/domain/quotes/stock/__init__.py rename to src/zvt/domain/quotes/stock/__init__.py index 413fd3a3..b6db7468 100644 --- a/zvt/domain/quotes/stock/__init__.py +++ b/src/zvt/domain/quotes/stock/__init__.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it + + # the __all__ is generated __all__ = [] @@ -10,89 +12,113 @@ # import all from submodule stock_1h_kdata from .stock_1h_kdata import * from .stock_1h_kdata import __all__ as _stock_1h_kdata_all + __all__ += _stock_1h_kdata_all # import all from submodule stock_15m_hfq_kdata from .stock_15m_hfq_kdata import * from .stock_15m_hfq_kdata import __all__ as _stock_15m_hfq_kdata_all + __all__ += _stock_15m_hfq_kdata_all # import all from submodule stock_1wk_kdata from .stock_1wk_kdata import * from .stock_1wk_kdata import __all__ as _stock_1wk_kdata_all + __all__ += _stock_1wk_kdata_all # import all from submodule stock_15m_kdata from .stock_15m_kdata import * from .stock_15m_kdata import __all__ as _stock_15m_kdata_all + __all__ += _stock_15m_kdata_all # import all from submodule stock_1m_hfq_kdata from .stock_1m_hfq_kdata import * from .stock_1m_hfq_kdata import __all__ as _stock_1m_hfq_kdata_all + __all__ += _stock_1m_hfq_kdata_all # import all from submodule stock_4h_hfq_kdata from .stock_4h_hfq_kdata import * from .stock_4h_hfq_kdata import __all__ as _stock_4h_hfq_kdata_all + __all__ += _stock_4h_hfq_kdata_all # import all from submodule stock_5m_hfq_kdata from .stock_5m_hfq_kdata import * from 
.stock_5m_hfq_kdata import __all__ as _stock_5m_hfq_kdata_all + __all__ += _stock_5m_hfq_kdata_all # import all from submodule stock_5m_kdata from .stock_5m_kdata import * from .stock_5m_kdata import __all__ as _stock_5m_kdata_all + __all__ += _stock_5m_kdata_all # import all from submodule stock_1d_kdata from .stock_1d_kdata import * from .stock_1d_kdata import __all__ as _stock_1d_kdata_all + __all__ += _stock_1d_kdata_all # import all from submodule stock_30m_hfq_kdata from .stock_30m_hfq_kdata import * from .stock_30m_hfq_kdata import __all__ as _stock_30m_hfq_kdata_all + __all__ += _stock_30m_hfq_kdata_all # import all from submodule stock_1mon_hfq_kdata from .stock_1mon_hfq_kdata import * from .stock_1mon_hfq_kdata import __all__ as _stock_1mon_hfq_kdata_all + __all__ += _stock_1mon_hfq_kdata_all # import all from submodule stock_1wk_hfq_kdata from .stock_1wk_hfq_kdata import * from .stock_1wk_hfq_kdata import __all__ as _stock_1wk_hfq_kdata_all + __all__ += _stock_1wk_hfq_kdata_all # import all from submodule stock_1mon_kdata from .stock_1mon_kdata import * from .stock_1mon_kdata import __all__ as _stock_1mon_kdata_all + __all__ += _stock_1mon_kdata_all # import all from submodule stock_1h_hfq_kdata from .stock_1h_hfq_kdata import * from .stock_1h_hfq_kdata import __all__ as _stock_1h_hfq_kdata_all + __all__ += _stock_1h_hfq_kdata_all # import all from submodule stock_1m_kdata from .stock_1m_kdata import * from .stock_1m_kdata import __all__ as _stock_1m_kdata_all + __all__ += _stock_1m_kdata_all # import all from submodule stock_4h_kdata from .stock_4h_kdata import * from .stock_4h_kdata import __all__ as _stock_4h_kdata_all + __all__ += _stock_4h_kdata_all # import all from submodule stock_1d_hfq_kdata from .stock_1d_hfq_kdata import * from .stock_1d_hfq_kdata import __all__ as _stock_1d_hfq_kdata_all + __all__ += _stock_1d_hfq_kdata_all +# import all from submodule stock_quote +from .stock_quote import * +from .stock_quote import __all__ as 
_stock_quote_all + +__all__ += _stock_quote_all + # import all from submodule stock_30m_kdata from .stock_30m_kdata import * from .stock_30m_kdata import __all__ as _stock_30m_kdata_all -__all__ += _stock_30m_kdata_all \ No newline at end of file + +__all__ += _stock_30m_kdata_all diff --git a/zvt/domain/quotes/stock/stock_15m_hfq_kdata.py b/src/zvt/domain/quotes/stock/stock_15m_hfq_kdata.py similarity index 55% rename from zvt/domain/quotes/stock/stock_15m_hfq_kdata.py rename to src/zvt/domain/quotes/stock/stock_15m_hfq_kdata.py index 2ebb1385..ca66e7a1 100644 --- a/zvt/domain/quotes/stock/stock_15m_hfq_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_15m_hfq_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock15mHfqKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_15m_hfq_kdata' + __tablename__ = "stock_15m_hfq_kdata" -register_schema(providers=['joinquant'], db_name='stock_15m_hfq_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_15m_hfq_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock15mHfqKdata'] \ No newline at end of file +__all__ = ["Stock15mHfqKdata"] diff --git a/zvt/domain/quotes/stock/stock_15m_kdata.py b/src/zvt/domain/quotes/stock/stock_15m_kdata.py similarity index 56% rename from zvt/domain/quotes/stock/stock_15m_kdata.py rename to src/zvt/domain/quotes/stock/stock_15m_kdata.py index 470c0ea4..770e60cc 100644 --- a/zvt/domain/quotes/stock/stock_15m_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_15m_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it 
-from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock15mKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_15m_kdata' + __tablename__ = "stock_15m_kdata" -register_schema(providers=['joinquant'], db_name='stock_15m_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_15m_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock15mKdata'] \ No newline at end of file +__all__ = ["Stock15mKdata"] diff --git a/zvt/domain/quotes/stock/stock_1d_hfq_kdata.py b/src/zvt/domain/quotes/stock/stock_1d_hfq_kdata.py similarity index 55% rename from zvt/domain/quotes/stock/stock_1d_hfq_kdata.py rename to src/zvt/domain/quotes/stock/stock_1d_hfq_kdata.py index 074cd0b3..108ab52a 100644 --- a/zvt/domain/quotes/stock/stock_1d_hfq_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_1d_hfq_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock1dHfqKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_1d_hfq_kdata' + __tablename__ = "stock_1d_hfq_kdata" -register_schema(providers=['joinquant'], db_name='stock_1d_hfq_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_1d_hfq_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock1dHfqKdata'] \ No newline at end of file +__all__ = ["Stock1dHfqKdata"] diff --git a/zvt/domain/quotes/stock/stock_1d_kdata.py 
b/src/zvt/domain/quotes/stock/stock_1d_kdata.py similarity index 56% rename from zvt/domain/quotes/stock/stock_1d_kdata.py rename to src/zvt/domain/quotes/stock/stock_1d_kdata.py index f1b956b1..eb46fa83 100644 --- a/zvt/domain/quotes/stock/stock_1d_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_1d_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock1dKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_1d_kdata' + __tablename__ = "stock_1d_kdata" -register_schema(providers=['joinquant'], db_name='stock_1d_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_1d_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock1dKdata'] \ No newline at end of file +__all__ = ["Stock1dKdata"] diff --git a/zvt/domain/quotes/stock/stock_1h_hfq_kdata.py b/src/zvt/domain/quotes/stock/stock_1h_hfq_kdata.py similarity index 55% rename from zvt/domain/quotes/stock/stock_1h_hfq_kdata.py rename to src/zvt/domain/quotes/stock/stock_1h_hfq_kdata.py index 95660d73..b864673b 100644 --- a/zvt/domain/quotes/stock/stock_1h_hfq_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_1h_hfq_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock1hHfqKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_1h_hfq_kdata' + __tablename__ = "stock_1h_hfq_kdata" 
-register_schema(providers=['joinquant'], db_name='stock_1h_hfq_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_1h_hfq_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock1hHfqKdata'] \ No newline at end of file +__all__ = ["Stock1hHfqKdata"] diff --git a/zvt/domain/quotes/stock/stock_1h_kdata.py b/src/zvt/domain/quotes/stock/stock_1h_kdata.py similarity index 56% rename from zvt/domain/quotes/stock/stock_1h_kdata.py rename to src/zvt/domain/quotes/stock/stock_1h_kdata.py index 246c2c98..cdbeacac 100644 --- a/zvt/domain/quotes/stock/stock_1h_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_1h_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock1hKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_1h_kdata' + __tablename__ = "stock_1h_kdata" -register_schema(providers=['joinquant'], db_name='stock_1h_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_1h_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock1hKdata'] \ No newline at end of file +__all__ = ["Stock1hKdata"] diff --git a/zvt/domain/quotes/stock/stock_1m_hfq_kdata.py b/src/zvt/domain/quotes/stock/stock_1m_hfq_kdata.py similarity index 55% rename from zvt/domain/quotes/stock/stock_1m_hfq_kdata.py rename to src/zvt/domain/quotes/stock/stock_1m_hfq_kdata.py index 1ad3315e..c91345ca 100644 --- a/zvt/domain/quotes/stock/stock_1m_hfq_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_1m_hfq_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by 
gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock1mHfqKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_1m_hfq_kdata' + __tablename__ = "stock_1m_hfq_kdata" -register_schema(providers=['joinquant'], db_name='stock_1m_hfq_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_1m_hfq_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock1mHfqKdata'] \ No newline at end of file +__all__ = ["Stock1mHfqKdata"] diff --git a/zvt/domain/quotes/stock/stock_1m_kdata.py b/src/zvt/domain/quotes/stock/stock_1m_kdata.py similarity index 56% rename from zvt/domain/quotes/stock/stock_1m_kdata.py rename to src/zvt/domain/quotes/stock/stock_1m_kdata.py index ffacdd4a..642f4a0d 100644 --- a/zvt/domain/quotes/stock/stock_1m_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_1m_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock1mKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_1m_kdata' + __tablename__ = "stock_1m_kdata" -register_schema(providers=['joinquant'], db_name='stock_1m_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_1m_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock1mKdata'] \ No newline at end of file +__all__ = ["Stock1mKdata"] diff --git a/zvt/domain/quotes/stock/stock_1mon_hfq_kdata.py 
b/src/zvt/domain/quotes/stock/stock_1mon_hfq_kdata.py similarity index 54% rename from zvt/domain/quotes/stock/stock_1mon_hfq_kdata.py rename to src/zvt/domain/quotes/stock/stock_1mon_hfq_kdata.py index 096e973c..495bc3a1 100644 --- a/zvt/domain/quotes/stock/stock_1mon_hfq_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_1mon_hfq_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock1monHfqKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_1mon_hfq_kdata' + __tablename__ = "stock_1mon_hfq_kdata" -register_schema(providers=['joinquant'], db_name='stock_1mon_hfq_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_1mon_hfq_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock1monHfqKdata'] \ No newline at end of file +__all__ = ["Stock1monHfqKdata"] diff --git a/zvt/domain/quotes/stock/stock_1mon_kdata.py b/src/zvt/domain/quotes/stock/stock_1mon_kdata.py similarity index 55% rename from zvt/domain/quotes/stock/stock_1mon_kdata.py rename to src/zvt/domain/quotes/stock/stock_1mon_kdata.py index fc82fff3..0f3f35f3 100644 --- a/zvt/domain/quotes/stock/stock_1mon_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_1mon_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock1monKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_1mon_kdata' + __tablename__ = 
"stock_1mon_kdata" -register_schema(providers=['joinquant'], db_name='stock_1mon_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_1mon_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock1monKdata'] \ No newline at end of file +__all__ = ["Stock1monKdata"] diff --git a/zvt/domain/quotes/stock/stock_1wk_hfq_kdata.py b/src/zvt/domain/quotes/stock/stock_1wk_hfq_kdata.py similarity index 55% rename from zvt/domain/quotes/stock/stock_1wk_hfq_kdata.py rename to src/zvt/domain/quotes/stock/stock_1wk_hfq_kdata.py index 53e30dec..0c3a649f 100644 --- a/zvt/domain/quotes/stock/stock_1wk_hfq_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_1wk_hfq_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock1wkHfqKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_1wk_hfq_kdata' + __tablename__ = "stock_1wk_hfq_kdata" -register_schema(providers=['joinquant'], db_name='stock_1wk_hfq_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_1wk_hfq_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock1wkHfqKdata'] \ No newline at end of file +__all__ = ["Stock1wkHfqKdata"] diff --git a/zvt/domain/quotes/stock/stock_1wk_kdata.py b/src/zvt/domain/quotes/stock/stock_1wk_kdata.py similarity index 56% rename from zvt/domain/quotes/stock/stock_1wk_kdata.py rename to src/zvt/domain/quotes/stock/stock_1wk_kdata.py index 35fd4d1e..a6386ef0 100644 --- a/zvt/domain/quotes/stock/stock_1wk_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_1wk_kdata.py @@ -1,6 +1,6 
@@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock1wkKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_1wk_kdata' + __tablename__ = "stock_1wk_kdata" -register_schema(providers=['joinquant'], db_name='stock_1wk_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_1wk_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock1wkKdata'] \ No newline at end of file +__all__ = ["Stock1wkKdata"] diff --git a/zvt/domain/quotes/stock/stock_30m_hfq_kdata.py b/src/zvt/domain/quotes/stock/stock_30m_hfq_kdata.py similarity index 55% rename from zvt/domain/quotes/stock/stock_30m_hfq_kdata.py rename to src/zvt/domain/quotes/stock/stock_30m_hfq_kdata.py index ea6433cd..09ab4176 100644 --- a/zvt/domain/quotes/stock/stock_30m_hfq_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_30m_hfq_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock30mHfqKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_30m_hfq_kdata' + __tablename__ = "stock_30m_hfq_kdata" -register_schema(providers=['joinquant'], db_name='stock_30m_hfq_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_30m_hfq_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock30mHfqKdata'] \ No newline at end of file 
+__all__ = ["Stock30mHfqKdata"] diff --git a/zvt/domain/quotes/stock/stock_30m_kdata.py b/src/zvt/domain/quotes/stock/stock_30m_kdata.py similarity index 56% rename from zvt/domain/quotes/stock/stock_30m_kdata.py rename to src/zvt/domain/quotes/stock/stock_30m_kdata.py index 25757cd6..a75ffe7f 100644 --- a/zvt/domain/quotes/stock/stock_30m_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_30m_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock30mKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_30m_kdata' + __tablename__ = "stock_30m_kdata" -register_schema(providers=['joinquant'], db_name='stock_30m_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_30m_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock30mKdata'] \ No newline at end of file +__all__ = ["Stock30mKdata"] diff --git a/zvt/domain/quotes/stock/stock_4h_hfq_kdata.py b/src/zvt/domain/quotes/stock/stock_4h_hfq_kdata.py similarity index 55% rename from zvt/domain/quotes/stock/stock_4h_hfq_kdata.py rename to src/zvt/domain/quotes/stock/stock_4h_hfq_kdata.py index f4f861fc..0f017c16 100644 --- a/zvt/domain/quotes/stock/stock_4h_hfq_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_4h_hfq_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock4hHfqKdata(KdataBase, StockKdataCommon): - 
__tablename__ = 'stock_4h_hfq_kdata' + __tablename__ = "stock_4h_hfq_kdata" -register_schema(providers=['joinquant'], db_name='stock_4h_hfq_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_4h_hfq_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock4hHfqKdata'] \ No newline at end of file +__all__ = ["Stock4hHfqKdata"] diff --git a/zvt/domain/quotes/stock/stock_4h_kdata.py b/src/zvt/domain/quotes/stock/stock_4h_kdata.py similarity index 56% rename from zvt/domain/quotes/stock/stock_4h_kdata.py rename to src/zvt/domain/quotes/stock/stock_4h_kdata.py index b9814ab4..7ca9c022 100644 --- a/zvt/domain/quotes/stock/stock_4h_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_4h_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock4hKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_4h_kdata' + __tablename__ = "stock_4h_kdata" -register_schema(providers=['joinquant'], db_name='stock_4h_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_4h_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock4hKdata'] \ No newline at end of file +__all__ = ["Stock4hKdata"] diff --git a/zvt/domain/quotes/stock/stock_5m_hfq_kdata.py b/src/zvt/domain/quotes/stock/stock_5m_hfq_kdata.py similarity index 55% rename from zvt/domain/quotes/stock/stock_5m_hfq_kdata.py rename to src/zvt/domain/quotes/stock/stock_5m_hfq_kdata.py index e083adf2..e935faa3 100644 --- a/zvt/domain/quotes/stock/stock_5m_hfq_kdata.py +++ 
b/src/zvt/domain/quotes/stock/stock_5m_hfq_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock5mHfqKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_5m_hfq_kdata' + __tablename__ = "stock_5m_hfq_kdata" -register_schema(providers=['joinquant'], db_name='stock_5m_hfq_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_5m_hfq_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock5mHfqKdata'] \ No newline at end of file +__all__ = ["Stock5mHfqKdata"] diff --git a/zvt/domain/quotes/stock/stock_5m_kdata.py b/src/zvt/domain/quotes/stock/stock_5m_kdata.py similarity index 56% rename from zvt/domain/quotes/stock/stock_5m_kdata.py rename to src/zvt/domain/quotes/stock/stock_5m_kdata.py index 5dffae30..a4717bab 100644 --- a/zvt/domain/quotes/stock/stock_5m_kdata.py +++ b/src/zvt/domain/quotes/stock/stock_5m_kdata.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract.register import register_schema from zvt.domain.quotes import StockKdataCommon @@ -9,10 +9,13 @@ class Stock5mKdata(KdataBase, StockKdataCommon): - __tablename__ = 'stock_5m_kdata' + __tablename__ = "stock_5m_kdata" -register_schema(providers=['joinquant'], db_name='stock_5m_kdata', schema_base=KdataBase, entity_type='stock') +register_schema( + providers=["em", "qmt", "joinquant"], db_name="stock_5m_kdata", schema_base=KdataBase, entity_type="stock" +) + # the __all__ is generated -__all__ = ['Stock5mKdata'] \ No 
newline at end of file +__all__ = ["Stock5mKdata"] diff --git a/src/zvt/domain/quotes/stock/stock_quote.py b/src/zvt/domain/quotes/stock/stock_quote.py new file mode 100644 index 00000000..ea4b5f86 --- /dev/null +++ b/src/zvt/domain/quotes/stock/stock_quote.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import String, Column, Float, Integer, Boolean, JSON +from sqlalchemy.orm import declarative_base + +from zvt.contract import Mixin +from zvt.contract.register import register_schema +from zvt.domain.quotes import StockKdataCommon + +StockQuoteBase = declarative_base() + + +class StockTick(StockQuoteBase, Mixin): + __tablename__ = "stock_tick" + + code = Column(String(length=32)) + + #: UNIX时间戳 + time = Column(Integer) + #: 最新价 + lastPrice = Column(Float) + + # 开盘价 + open = Column(Float) + # 最高价 + high = Column(Float) + # 最低价 + low = Column(Float) + # 上日收盘价 + lastClose = Column(Float) + + amount = Column(Float) + volume = Column(Float) + pvolume = Column(Float) + + askPrice = Column(JSON) + askVol = Column(JSON) + bidPrice = Column(JSON) + bidVol = Column(JSON) + + +class StockQuote(StockQuoteBase, StockKdataCommon): + __tablename__ = "stock_quote" + #: UNIX时间戳 + time = Column(Integer) + #: 最新价 + price = Column(Float) + #: 是否涨停 + is_limit_up = Column(Boolean) + #: 封涨停金额 + limit_up_amount = Column(Float) + #: 是否跌停 + is_limit_down = Column(Boolean) + #: 封跌停金额 + limit_down_amount = Column(Float) + #: 5挡卖单金额 + ask_amount = Column(Float) + #: 5挡买单金额 + bid_amount = Column(Float) + #: 流通市值 + float_cap = Column(Float) + #: 总市值 + total_cap = Column(Float) + + +class StockQuoteLog(StockQuoteBase, StockKdataCommon): + __tablename__ = "stock_quote_log" + #: UNIX时间戳 + time = Column(Integer) + #: 最新价 + price = Column(Float) + #: 是否涨停 + is_limit_up = Column(Boolean) + #: 封涨停金额 + limit_up_amount = Column(Float) + #: 是否跌停 + is_limit_down = Column(Boolean) + #: 封跌停金额 + limit_down_amount = Column(Float) + #: 5挡卖单金额 + ask_amount = Column(Float) + #: 5挡买单金额 + bid_amount 
= Column(Float) + #: 流通市值 + float_cap = Column(Float) + #: 总市值 + total_cap = Column(Float) + + +class Stock1mQuote(StockQuoteBase, Mixin): + __tablename__ = "stock_1m_quote" + code = Column(String(length=32)) + name = Column(String(length=32)) + + #: UNIX时间戳 + time = Column(Integer) + #: 最新价 + price = Column(Float) + #: 均价 + avg_price = Column(Float) + # 涨跌幅 + change_pct = Column(Float) + # 成交量 + volume = Column(Float) + # 成交金额 + turnover = Column(Float) + # 换手率 + turnover_rate = Column(Float) + #: 是否涨停 + is_limit_up = Column(Boolean) + #: 是否跌停 + is_limit_down = Column(Boolean) + + +register_schema(providers=["qmt"], db_name="stock_quote", schema_base=StockQuoteBase, entity_type="stock") + + +# the __all__ is generated +__all__ = ["StockQuote", "StockQuoteLog", "Stock1mQuote"] diff --git a/src/zvt/domain/quotes/stockhk/__init__.py b/src/zvt/domain/quotes/stockhk/__init__.py new file mode 100644 index 00000000..546d88d0 --- /dev/null +++ b/src/zvt/domain/quotes/stockhk/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule stockhk_1d_kdata +from .stockhk_1d_kdata import * +from .stockhk_1d_kdata import __all__ as _stockhk_1d_kdata_all + +__all__ += _stockhk_1d_kdata_all + +# import all from submodule stockhk_1d_hfq_kdata +from .stockhk_1d_hfq_kdata import * +from .stockhk_1d_hfq_kdata import __all__ as _stockhk_1d_hfq_kdata_all + +__all__ += _stockhk_1d_hfq_kdata_all diff --git a/src/zvt/domain/quotes/stockhk/stockhk_1d_hfq_kdata.py b/src/zvt/domain/quotes/stockhk/stockhk_1d_hfq_kdata.py new file mode 100644 index 00000000..35b4bfdf --- /dev/null +++ b/src/zvt/domain/quotes/stockhk/stockhk_1d_hfq_kdata.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# this file is generated by gen_kdata_schema function, dont't change it +from sqlalchemy.orm import declarative_base 
+ +from zvt.contract.register import register_schema +from zvt.domain.quotes import StockhkKdataCommon + +KdataBase = declarative_base() + + +class Stockhk1dHfqKdata(KdataBase, StockhkKdataCommon): + __tablename__ = "stockhk_1d_hfq_kdata" + + +register_schema(providers=["em"], db_name="stockhk_1d_hfq_kdata", schema_base=KdataBase, entity_type="stockhk") + + +# the __all__ is generated +__all__ = ["Stockhk1dHfqKdata"] diff --git a/src/zvt/domain/quotes/stockhk/stockhk_1d_kdata.py b/src/zvt/domain/quotes/stockhk/stockhk_1d_kdata.py new file mode 100644 index 00000000..b056a503 --- /dev/null +++ b/src/zvt/domain/quotes/stockhk/stockhk_1d_kdata.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# this file is generated by gen_kdata_schema function, dont't change it +from sqlalchemy.orm import declarative_base + +from zvt.contract.register import register_schema +from zvt.domain.quotes import StockhkKdataCommon + +KdataBase = declarative_base() + + +class Stockhk1dKdata(KdataBase, StockhkKdataCommon): + __tablename__ = "stockhk_1d_kdata" + + +register_schema(providers=["em"], db_name="stockhk_1d_kdata", schema_base=KdataBase, entity_type="stockhk") + + +# the __all__ is generated +__all__ = ["Stockhk1dKdata"] diff --git a/src/zvt/domain/quotes/stockus/__init__.py b/src/zvt/domain/quotes/stockus/__init__.py new file mode 100644 index 00000000..72c8855b --- /dev/null +++ b/src/zvt/domain/quotes/stockus/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule stockus_1d_kdata +from .stockus_1d_kdata import * +from .stockus_1d_kdata import __all__ as _stockus_1d_kdata_all + +__all__ += _stockus_1d_kdata_all + +# import all from submodule stockus_1d_hfq_kdata +from .stockus_1d_hfq_kdata import * +from .stockus_1d_hfq_kdata import __all__ as _stockus_1d_hfq_kdata_all + +__all__ 
+= _stockus_1d_hfq_kdata_all diff --git a/src/zvt/domain/quotes/stockus/stockus_1d_hfq_kdata.py b/src/zvt/domain/quotes/stockus/stockus_1d_hfq_kdata.py new file mode 100644 index 00000000..90980435 --- /dev/null +++ b/src/zvt/domain/quotes/stockus/stockus_1d_hfq_kdata.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# this file is generated by gen_kdata_schema function, dont't change it +from sqlalchemy.orm import declarative_base + +from zvt.contract.register import register_schema +from zvt.domain.quotes import StockusKdataCommon + +KdataBase = declarative_base() + + +class Stockus1dHfqKdata(KdataBase, StockusKdataCommon): + __tablename__ = "stockus_1d_hfq_kdata" + + +register_schema(providers=["em"], db_name="stockus_1d_hfq_kdata", schema_base=KdataBase, entity_type="stockus") + + +# the __all__ is generated +__all__ = ["Stockus1dHfqKdata"] diff --git a/src/zvt/domain/quotes/stockus/stockus_1d_kdata.py b/src/zvt/domain/quotes/stockus/stockus_1d_kdata.py new file mode 100644 index 00000000..16c43731 --- /dev/null +++ b/src/zvt/domain/quotes/stockus/stockus_1d_kdata.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# this file is generated by gen_kdata_schema function, dont't change it +from sqlalchemy.orm import declarative_base + +from zvt.contract.register import register_schema +from zvt.domain.quotes import StockusKdataCommon + +KdataBase = declarative_base() + + +class Stockus1dKdata(KdataBase, StockusKdataCommon): + __tablename__ = "stockus_1d_kdata" + + +register_schema(providers=["em"], db_name="stockus_1d_kdata", schema_base=KdataBase, entity_type="stockus") + + +# the __all__ is generated +__all__ = ["Stockus1dKdata"] diff --git a/zvt/domain/quotes/trade_day.py b/src/zvt/domain/quotes/trade_day.py similarity index 52% rename from zvt/domain/quotes/trade_day.py rename to src/zvt/domain/quotes/trade_day.py index d7d25727..fa3607e7 100644 --- a/zvt/domain/quotes/trade_day.py +++ b/src/zvt/domain/quotes/trade_day.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -from 
sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract import Mixin from zvt.contract.register import register_schema @@ -8,11 +8,11 @@ class StockTradeDay(TradeDayBase, Mixin): - __tablename__ = 'stock_trade_day' + __tablename__ = "stock_trade_day" -register_schema(providers=['joinquant'], db_name='trade_day', schema_base=TradeDayBase) +register_schema(providers=["joinquant"], db_name="trade_day", schema_base=TradeDayBase) + -__all__ = ['StockTradeDay'] # the __all__ is generated -__all__ = ['StockTradeDay'] \ No newline at end of file +__all__ = ["StockTradeDay"] diff --git a/zvt/factors/__init__.py b/src/zvt/factors/__init__.py similarity index 58% rename from zvt/factors/__init__.py rename to src/zvt/factors/__init__.py index c9b3158c..498be46f 100644 --- a/zvt/factors/__init__.py +++ b/src/zvt/factors/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*-# + # the __all__ is generated __all__ = [] @@ -9,34 +10,71 @@ # import all from submodule algorithm from .algorithm import * from .algorithm import __all__ as _algorithm_all + __all__ += _algorithm_all +# import all from submodule top_stocks +from .top_stocks import * +from .top_stocks import __all__ as _top_stocks_all + +__all__ += _top_stocks_all + # import all from submodule ma from .ma import * from .ma import __all__ as _ma_all + __all__ += _ma_all +# import all from submodule transformers +from .transformers import * +from .transformers import __all__ as _transformers_all + +__all__ += _transformers_all + # import all from submodule macd from .macd import * from .macd import __all__ as _macd_all + __all__ += _macd_all # import all from submodule zen from .zen import * from .zen import __all__ as _zen_all + __all__ += _zen_all # import all from submodule technical_factor from .technical_factor import * from .technical_factor import __all__ as _technical_factor_all + __all__ += _technical_factor_all # import all from submodule fundamental from 
.fundamental import * from .fundamental import __all__ as _fundamental_all + __all__ += _fundamental_all +# import all from submodule factor_service +from .factor_service import * +from .factor_service import __all__ as _factor_service_all + +__all__ += _factor_service_all + +# import all from submodule factor_models +from .factor_models import * +from .factor_models import __all__ as _factor_models_all + +__all__ += _factor_models_all + # import all from submodule target_selector from .target_selector import * from .target_selector import __all__ as _target_selector_all -__all__ += _target_selector_all \ No newline at end of file + +__all__ += _target_selector_all + +# import all from submodule shape +from .shape import * +from .shape import __all__ as _shape_all + +__all__ += _shape_all diff --git a/zvt/factors/algorithm.py b/src/zvt/factors/algorithm.py similarity index 53% rename from zvt/factors/algorithm.py rename to src/zvt/factors/algorithm.py index c11a5fd1..8b30c478 100644 --- a/zvt/factors/algorithm.py +++ b/src/zvt/factors/algorithm.py @@ -3,10 +3,10 @@ import pandas as pd from zvt.contract.factor import Scorer, Transformer -from zvt.utils.pd_utils import normal_index_df +from zvt.utils.pd_utils import normal_index_df, group_by_entity_id, normalize_group_compute_result -def ma(s: pd.Series, window: int = 5): +def ma(s: pd.Series, window: int = 5) -> pd.Series: """ :param s: @@ -16,7 +16,7 @@ def ma(s: pd.Series, window: int = 5): return s.rolling(window=window, min_periods=window).mean() -def ema(s, window=12): +def ema(s: pd.Series, window: int = 12) -> pd.Series: return s.ewm(span=window, adjust=False, min_periods=window).mean() @@ -27,19 +27,27 @@ def live_or_dead(x): return -1 -def macd(s, slow=26, fast=12, n=9, return_type='df', normal=False, count_live_dead=False): +def macd( + s: pd.Series, + slow: int = 26, + fast: int = 12, + n: int = 9, + return_type: str = "df", + normal: bool = False, + count_live_dead: bool = False, +): # 短期均线 ema_fast = 
ema(s, window=fast) # 长期均线 ema_slow = ema(s, window=slow) # 短期均线 - 长期均线 = 趋势的力度 - diff = ema_fast - ema_slow + diff: pd.Series = ema_fast - ema_slow # 力度均线 - dea = diff.ewm(span=n, adjust=False).mean() + dea: pd.Series = diff.ewm(span=n, adjust=False).mean() # 力度 的变化 - m = (diff - dea) * 2 + m: pd.Series = (diff - dea) * 2 # normal it if normal: @@ -52,15 +60,16 @@ def macd(s, slow=26, fast=12, n=9, return_type='df', normal=False, count_live_de bull = (diff > 0) & (dea > 0) live_count = live * (live.groupby((live != live.shift()).cumsum()).cumcount() + 1) - if return_type == 'se': + if return_type == "se": if count_live_dead: return diff, dea, m, live, bull, live_count return diff, dea, m else: if count_live_dead: return pd.DataFrame( - {'diff': diff, 'dea': dea, 'macd': m, 'live': live, 'bull': bull, 'live_count': live_count}) - return pd.DataFrame({'diff': diff, 'dea': dea, 'macd': m}) + {"diff": diff, "dea": dea, "macd": m, "live": live, "bull": bull, "live_count": live_count} + ) + return pd.DataFrame({"diff": diff, "dea": dea, "macd": m}) def point_in_range(point: float, range: tuple): @@ -83,6 +92,28 @@ def intersect_ranges(range_list): return result +def combine(range_a, range_b): + if intersect(range_a, range_b): + return min(range_a[0], range_b[0]), max(range_a[1], range_b[1]) + return None + + +def distance(range_a, range_b, use_max=False): + if use_max: + # 上升 + if range_b[0] >= range_a[1]: + return (range_b[1] - range_a[0]) / range_a[0] + + # 下降 + if range_b[1] <= range_a[0]: + return (range_b[0] - range_a[1]) / range_a[1] + else: + middle_start = (range_a[0] + range_a[1]) / 2 + middle_end = (range_b[0] + range_b[1]) / 2 + + return (middle_end - middle_start) / middle_start + + def intersect(range_a, range_b): """ range_a and range_b with format (start,end) in y axis @@ -117,20 +148,44 @@ def score(self, input_df) -> pd.DataFrame: class MaTransformer(Transformer): - def __init__(self, windows=[5, 10], cal_change_pct=False) -> None: + def __init__(self, 
windows=None, cal_change_pct=False) -> None: super().__init__() + if windows is None: + windows = [5, 10] self.windows = windows self.cal_change_pct = cal_change_pct + def transform(self, input_df: pd.DataFrame) -> pd.DataFrame: + if self.cal_change_pct: + group_pct = group_by_entity_id(input_df["close"]).pct_change() + input_df["change_pct"] = normalize_group_compute_result(group_pct) + + for window in self.windows: + col = "ma{}".format(window) + self.indicators.append(col) + + group_ma = group_by_entity_id(input_df["close"]).rolling(window=window, min_periods=window).mean() + input_df[col] = normalize_group_compute_result(group_ma) + + return input_df + def transform_one(self, entity_id, df: pd.DataFrame) -> pd.DataFrame: + """ + transform_one would not take effects if transform was implemented. + Just show how to implement it here, most of time you should overwrite transform directly for performance. + + :param entity_id: + :param df: + :return: + """ if self.cal_change_pct: - df['change_pct'] = df['close'].pct_change() + df["change_pct"] = df["close"].pct_change() for window in self.windows: - col = 'ma{}'.format(window) + col = "ma{}".format(window) self.indicators.append(col) - df[col] = df['close'].rolling(window=window, min_periods=window).mean() + df[col] = df["close"].rolling(window=window, min_periods=window).mean() return df @@ -141,64 +196,72 @@ def __init__(self, kdata_overlap=0) -> None: self.kdata_overlap = kdata_overlap def transform(self, input_df) -> pd.DataFrame: + """ + + :param input_df: + :return: + """ if self.kdata_overlap > 0: # 没有重叠,区间就是(0,0) - input_df['overlap'] = [(0, 0)] * len(input_df.index) + input_df["overlap"] = [(0, 0)] * len(input_df.index) def cal_overlap(s): - high = input_df.loc[s.index, 'high'] - low = input_df.loc[s.index, 'low'] + high = input_df.loc[s.index, "high"] + low = input_df.loc[s.index, "low"] intersection = intersect_ranges(list(zip(low.to_list(), high.to_list()))) if intersection: # 设置column 
overlap为intersection,即重叠区间 - input_df.at[s.index[-1], 'overlap'] = intersection + input_df.at[s.index[-1], "overlap"] = intersection return 0 - input_df[['high', 'low']].groupby(level=0).rolling(window=self.kdata_overlap, - min_periods=self.kdata_overlap).apply( - cal_overlap, raw=False) + input_df[["high", "low"]].groupby(level=0).rolling( + window=self.kdata_overlap, min_periods=self.kdata_overlap + ).apply(cal_overlap, raw=False) return input_df class MaAndVolumeTransformer(Transformer): - def __init__(self, windows=[5, 10], vol_windows=[30], kdata_overlap=0) -> None: + def __init__(self, windows=None, vol_windows=None, kdata_overlap=0) -> None: super().__init__() + if vol_windows is None: + vol_windows = [30] + if windows is None: + windows = [5, 10] self.windows = windows self.vol_windows = vol_windows self.kdata_overlap = kdata_overlap def transform(self, input_df) -> pd.DataFrame: for window in self.windows: - col = 'ma{}'.format(window) + col = f"ma{window}" self.indicators.append(col) - ma_df = input_df['close'].groupby(level=0).rolling(window=window, min_periods=window).mean() + ma_df = input_df["close"].groupby(level=0).rolling(window=window, min_periods=window).mean() ma_df = ma_df.reset_index(level=0, drop=True) input_df[col] = ma_df for vol_window in self.vol_windows: - col = 'vol_ma{}'.format(vol_window) - self.indicators.append(col) + col = "vol_ma{}".format(vol_window) - vol_ma_df = input_df['volume'].groupby(level=0).rolling(window=vol_window, min_periods=vol_window).mean() + vol_ma_df = input_df["volume"].groupby(level=0).rolling(window=vol_window, min_periods=vol_window).mean() vol_ma_df = vol_ma_df.reset_index(level=0, drop=True) input_df[col] = vol_ma_df if self.kdata_overlap > 0: - input_df['overlap'] = [(0, 0)] * len(input_df.index) + input_df["overlap"] = [(0, 0)] * len(input_df.index) def cal_overlap(s): - high = input_df.loc[s.index, 'high'] - low = input_df.loc[s.index, 'low'] + high = input_df.loc[s.index, "high"] + low = 
input_df.loc[s.index, "low"] intersection = intersect_ranges(list(zip(low.to_list(), high.to_list()))) if intersection: - input_df.at[s.index[-1], 'overlap'] = intersection + input_df.at[s.index[-1], "overlap"] = intersection return 0 - input_df[['high', 'low']].groupby(level=0).rolling(window=self.kdata_overlap, - min_periods=self.kdata_overlap).apply( - cal_overlap, raw=False) + input_df[["high", "low"]].groupby(level=0).rolling( + window=self.kdata_overlap, min_periods=self.kdata_overlap + ).apply(cal_overlap, raw=False) return input_df @@ -212,21 +275,37 @@ def __init__(self, slow=26, fast=12, n=9, normal=False, count_live_dead=False) - self.normal = normal self.count_live_dead = count_live_dead - self.indicators.append('diff') - self.indicators.append('dea') - self.indicators.append('macd') + self.indicators.append("diff") + self.indicators.append("dea") + self.indicators.append("macd") def transform(self, input_df) -> pd.DataFrame: - macd_df = input_df.groupby(level=0)['close'].apply( - lambda x: macd(x, slow=self.slow, fast=self.fast, n=self.n, return_type='df', normal=self.normal, - count_live_dead=self.count_live_dead)) - input_df = pd.concat([input_df, macd_df], axis=1, sort=False) + macd_df = input_df.groupby(level=0)["close"].apply( + lambda x: macd( + x, + slow=self.slow, + fast=self.fast, + n=self.n, + return_type="df", + normal=self.normal, + count_live_dead=self.count_live_dead, + ) + ) + macd_df = macd_df.reset_index(level=0, drop=True) + input_df = pd.concat([input_df, macd_df], axis=1, sort=False, verify_integrity=True) return input_df def transform_one(self, entity_id, df: pd.DataFrame) -> pd.DataFrame: - print(f'transform_one {entity_id} {df}') - return macd(df['close'], slow=self.slow, fast=self.fast, n=self.n, return_type='df', normal=self.normal, - count_live_dead=self.count_live_dead) + print(f"transform_one {entity_id} {df}") + return macd( + df["close"], + slow=self.slow, + fast=self.fast, + n=self.n, + return_type="df", + 
normal=self.normal, + count_live_dead=self.count_live_dead, + ) class QuantileScorer(Scorer): @@ -237,19 +316,18 @@ def score(self, input_df): self.score_levels.sort(reverse=True) quantile_df = input_df.groupby(level=1).quantile(self.score_levels) - quantile_df.index.names = [self.time_field, 'score'] + quantile_df.index.names = [self.time_field, "score_result"] - self.logger.info('factor:{},quantile:\n{}'.format(self.factor_name, quantile_df)) + self.logger.info("factor:{},quantile:\n{}".format(self.factor_name, quantile_df)) result_df = input_df.copy() - result_df.reset_index(inplace=True, level='entity_id') - result_df['quantile'] = None + result_df.reset_index(inplace=True, level="entity_id") + result_df["quantile"] = None for timestamp in quantile_df.index.levels[0]: - length = len(result_df.loc[result_df.index == timestamp, 'quantile']) - result_df.loc[result_df.index == timestamp, 'quantile'] = [quantile_df.loc[ - timestamp].to_dict()] * length + length = len(result_df.loc[result_df.index == timestamp, "quantile"]) + result_df.loc[result_df.index == timestamp, "quantile"] = [quantile_df.loc[timestamp].to_dict()] * length - self.logger.info('factor:{},df with quantile:\n{}'.format(self.factor_name, result_df)) + self.logger.info("factor:{},df with quantile:\n{}".format(self.factor_name, result_df)) # result_df = result_df.set_index(['entity_id'], append=True) # result_df = result_df.sort_index(level=[0, 1]) @@ -269,20 +347,34 @@ def calculate_score(df, factor_name, quantile): return score for factor in input_df.columns.to_list(): - result_df[factor] = result_df.apply(lambda x: calculate_score(x, factor, x['quantile']), - axis=1) + result_df[factor] = result_df.apply(lambda x: calculate_score(x, factor, x["quantile"]), axis=1) result_df = result_df.reset_index() result_df = normal_index_df(result_df) result_df = result_df.loc[:, self.factors] - result_df = result_df.loc[~result_df.index.duplicated(keep='first')] + result_df = 
result_df.loc[~result_df.index.duplicated(keep="first")] - self.logger.info('factor:{},df:\n{}'.format(self.factor_name, result_df)) + self.logger.info("factor:{},df:\n{}".format(self.factor_name, result_df)) return result_df # the __all__ is generated -__all__ = ['ma', 'ema', 'macd', 'point_in_range', 'intersect_ranges', 'intersect', 'RankScorer', - 'MaTransformer', 'IntersectTransformer', 'MaAndVolumeTransformer', 'MacdTransformer', 'QuantileScorer'] +__all__ = [ + "ma", + "ema", + "live_or_dead", + "macd", + "point_in_range", + "intersect_ranges", + "combine", + "distance", + "intersect", + "RankScorer", + "MaTransformer", + "IntersectTransformer", + "MaAndVolumeTransformer", + "MacdTransformer", + "QuantileScorer", +] diff --git a/src/zvt/factors/factor_models.py b/src/zvt/factors/factor_models.py new file mode 100644 index 00000000..8eb65324 --- /dev/null +++ b/src/zvt/factors/factor_models.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +from datetime import datetime +from typing import List, Optional + +from pydantic import BaseModel, Field + +from zvt.contract import IntervalLevel +from zvt.trader import TradingSignalType +from zvt.utils.time_utils import date_time_by_interval, current_date + + +class FactorRequestModel(BaseModel): + factor_name: str + entity_ids: Optional[List[str]] + data_provider: str = Field(default="em") + start_timestamp: datetime = Field(default=date_time_by_interval(current_date(), -365)) + level: IntervalLevel = Field(default=IntervalLevel.LEVEL_1DAY) + + +class TradingSignalModel(BaseModel): + entity_id: str + happen_timestamp: datetime + due_timestamp: datetime + trading_level: IntervalLevel = Field(default=IntervalLevel.LEVEL_1DAY) + trading_signal_type: TradingSignalType + position_pct: Optional[float] = Field(default=0.2) + order_amount: Optional[float] = Field(default=None) + order_money: Optional[float] = Field(default=None) + + +class FactorResultModel(BaseModel): + entity_ids: Optional[List[str]] + tag_reason: str + + +# the 
__all__ is generated +__all__ = ["FactorRequestModel", "TradingSignalModel", "FactorResultModel"] diff --git a/src/zvt/factors/factor_service.py b/src/zvt/factors/factor_service.py new file mode 100644 index 00000000..01c9ca82 --- /dev/null +++ b/src/zvt/factors/factor_service.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +import pandas as pd + +from zvt.contract import zvt_context +from zvt.domain import Stock +from zvt.factors.factor_models import FactorRequestModel +from zvt.factors.technical_factor import TechnicalFactor +from zvt.trader import TradingSignalType + + +def query_factor_result(factor_request_model: FactorRequestModel): + factor_name = factor_request_model.factor_name + entity_ids = factor_request_model.entity_ids + level = factor_request_model.level + + factor: TechnicalFactor = zvt_context.factor_cls_registry[factor_name]( + provider="em", + entity_provider="em", + entity_schema=Stock, + entity_ids=entity_ids, + level=level, + start_timestamp=factor_request_model.start_timestamp, + ) + df = factor.get_trading_signal_df() + df = df.reset_index(drop=False) + + def to_trading_signal(order_type): + if order_type is None: + return None + if order_type: + return TradingSignalType.open_long + if not order_type: + return TradingSignalType.close_long + + df = df.rename(columns={"timestamp": "happen_timestamp"}) + df["due_timestamp"] = df["happen_timestamp"] + pd.Timedelta(seconds=level.to_second()) + df["trading_signal_type"] = df["filter_result"].apply(lambda x: to_trading_signal(x)) + + print(df) + return df.to_dict(orient="records") + + +# the __all__ is generated +__all__ = ["query_factor_result"] diff --git a/zvt/factors/fundamental/__init__.py b/src/zvt/factors/fundamental/__init__.py similarity index 90% rename from zvt/factors/fundamental/__init__.py rename to src/zvt/factors/fundamental/__init__.py index 9ff3dbe8..509f9c6c 100644 --- a/zvt/factors/fundamental/__init__.py +++ b/src/zvt/factors/fundamental/__init__.py @@ -1,4 +1,6 @@ # -*- coding: 
utf-8 -*-# + + # the __all__ is generated __all__ = [] @@ -9,4 +11,5 @@ # import all from submodule finance_factor from .finance_factor import * from .finance_factor import __all__ as _finance_factor_all -__all__ += _finance_factor_all \ No newline at end of file + +__all__ += _finance_factor_all diff --git a/src/zvt/factors/fundamental/finance_factor.py b/src/zvt/factors/fundamental/finance_factor.py new file mode 100644 index 00000000..660cd514 --- /dev/null +++ b/src/zvt/factors/fundamental/finance_factor.py @@ -0,0 +1,230 @@ +# -*- coding: utf-8 -*- +import operator +from itertools import accumulate +from typing import List, Union, Type + +import pandas as pd + +from zvt.contract import IntervalLevel, Mixin, TradableEntity +from zvt.contract.factor import Factor, Transformer, Accumulator +from zvt.domain import FinanceFactor, BalanceSheet, Stock + + +class FinanceBaseFactor(Factor): + def __init__( + self, + data_schema: Type[Mixin] = FinanceFactor, + entity_schema: Type[TradableEntity] = Stock, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + columns: List = None, + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = None, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: int = None, + transformer: Transformer = None, + accumulator: Accumulator = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = False, + ) -> None: + if not columns: + columns = data_schema.important_cols() + super().__init__( + data_schema, + entity_schema, + provider, + entity_provider, + entity_ids, + 
exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + transformer, + accumulator, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + ) + + +class GoodCompanyFactor(FinanceBaseFactor): + def __init__( + self, + data_schema: Type[Mixin] = FinanceFactor, + entity_schema: TradableEntity = Stock, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + columns: List = ( + FinanceFactor.roe, + FinanceFactor.op_income_growth_yoy, + FinanceFactor.net_profit_growth_yoy, + FinanceFactor.report_period, + FinanceFactor.op_net_cash_flow_per_op_income, + FinanceFactor.sales_net_cash_flow_per_op_income, + FinanceFactor.current_ratio, + FinanceFactor.debt_asset_ratio, + ), + filters: List = ( + FinanceFactor.roe >= 0.02, + FinanceFactor.op_income_growth_yoy >= 0.05, + FinanceFactor.net_profit_growth_yoy >= 0.05, + FinanceFactor.op_net_cash_flow_per_op_income >= 0.1, + FinanceFactor.sales_net_cash_flow_per_op_income >= 0.3, + FinanceFactor.current_ratio >= 1, + FinanceFactor.debt_asset_ratio <= 0.5, + ), + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = True, + fill_method: str = "ffill", + effective_number: int = None, + transformer: Transformer = None, + accumulator: Accumulator = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = False, + window="1095d", + count=8, + col_period_threshold={"roe": 0.02}, + ) -> 
None: + self.window = window + self.count = count + + # 对于根据年度计算才有意义的指标,比如roe,我们会对不同季度的值区别处理,传入的参数为季度值 + self.col_period_threshold = col_period_threshold + if self.col_period_threshold: + if "report_period" not in columns and (data_schema.report_period not in columns): + columns.append(data_schema.report_period) + + self.logger.info(f"using data_schema:{data_schema.__name__}") + + super().__init__( + data_schema, + entity_schema, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + transformer, + accumulator, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + ) + + def compute_factor(self): + def filter_df(df): + se = pd.Series(index=df.index) + for index, row in df.iterrows(): + + if row.report_period == "year": + mul = 4 + elif row.report_period == "season3": + mul = 3 + elif row.report_period == "half_year": + mul = 2 + else: + mul = 1 + + filters = [] + for col in self.col_period_threshold: + col_se = eval(f"row.{col}") + filters.append(col_se >= mul * self.col_period_threshold[col]) + se[index] = list(accumulate(filters, func=operator.__and__))[-1] + + return se + + if self.col_period_threshold: + self.factor_df = self.data_df.loc[lambda df: filter_df(df), :] + + self.factor_df = pd.DataFrame(index=self.data_df.index, columns=["count"], data=1) + + self.factor_df = self.factor_df.reset_index(level=1) + + self.factor_df = self.factor_df.groupby(level=0).rolling(window=self.window, on=self.time_field).count() + + self.factor_df = self.factor_df.reset_index(level=0, drop=True) + self.factor_df = self.factor_df.set_index(self.time_field, append=True) + + self.factor_df = self.factor_df.loc[(slice(None), slice(self.start_timestamp, self.end_timestamp)), :] + + self.logger.info("factor:{},factor_df:\n{}".format(self.name, 
self.factor_df)) + + def compute_result(self): + self.result_df = self.factor_df.apply(lambda x: x >= self.count) + self.result_df.columns = ["filter_score"] + + self.logger.info("factor:{},result_df:\n{}".format(self.name, self.result_df)) + + +if __name__ == "__main__": + # f1 = GoodCompanyFactor(keep_all_timestamp=False) + # print(f1.result_df) + + # 高股息 低应收 + factor2 = GoodCompanyFactor( + data_schema=BalanceSheet, + columns=[BalanceSheet.accounts_receivable], + filters=[BalanceSheet.accounts_receivable <= 0.2 * BalanceSheet.total_current_assets], + keep_all_timestamp=False, + col_period_threshold=None, + ) + print(factor2.result_df) + + +# the __all__ is generated +__all__ = ["FinanceBaseFactor", "GoodCompanyFactor"] diff --git a/zvt/factors/ma/__init__.py b/src/zvt/factors/ma/__init__.py similarity index 75% rename from zvt/factors/ma/__init__.py rename to src/zvt/factors/ma/__init__.py index 27a9d11e..69d5d8bc 100644 --- a/zvt/factors/ma/__init__.py +++ b/src/zvt/factors/ma/__init__.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*-# + + # the __all__ is generated __all__ = [] @@ -6,17 +8,26 @@ # common code of the package # export interface in __all__ which contains __all__ of its sub modules +# import all from submodule ma_stats_factor +from .ma_stats_factor import * +from .ma_stats_factor import __all__ as _ma_stats_factor_all + +__all__ += _ma_stats_factor_all + # import all from submodule top_bottom_factor from .top_bottom_factor import * from .top_bottom_factor import __all__ as _top_bottom_factor_all + __all__ += _top_bottom_factor_all # import all from submodule ma_factor from .ma_factor import * from .ma_factor import __all__ as _ma_factor_all + __all__ += _ma_factor_all # import all from submodule domain from .domain import * from .domain import __all__ as _domain_all -__all__ += _domain_all \ No newline at end of file + +__all__ += _domain_all diff --git a/src/zvt/factors/ma/domain/__init__.py b/src/zvt/factors/ma/domain/__init__.py new file mode 100644 
index 00000000..c12d8e36 --- /dev/null +++ b/src/zvt/factors/ma/domain/__init__.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule stock_1d_ma_stats_factor +from .stock_1d_ma_stats_factor import * +from .stock_1d_ma_stats_factor import __all__ as _stock_1d_ma_stats_factor_all + +__all__ += _stock_1d_ma_stats_factor_all + +# import all from submodule stock_1d_ma_factor +from .stock_1d_ma_factor import * +from .stock_1d_ma_factor import __all__ as _stock_1d_ma_factor_all + +__all__ += _stock_1d_ma_factor_all + +# import all from submodule common +from .common import * +from .common import __all__ as _common_all + +__all__ += _common_all diff --git a/src/zvt/factors/ma/domain/common.py b/src/zvt/factors/ma/domain/common.py new file mode 100644 index 00000000..f62ad9a5 --- /dev/null +++ b/src/zvt/factors/ma/domain/common.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import Column, Float, Integer + +from zvt.contract import Mixin + + +class MaStatsFactorCommon(Mixin): + open = Column(Float) + close = Column(Float) + high = Column(Float) + low = Column(Float) + turnover = Column(Float) + + ma5 = Column(Float) + ma10 = Column(Float) + + ma34 = Column(Float) + ma55 = Column(Float) + ma89 = Column(Float) + ma144 = Column(Float) + + ma120 = Column(Float) + ma250 = Column(Float) + + vol_ma30 = Column(Float) + + live = Column(Integer) + count = Column(Integer) + distance = Column(Float) + area = Column(Float) + + +# the __all__ is generated +__all__ = ["MaStatsFactorCommon"] diff --git a/zvt/factors/ma/domain/stock_1d_ma_factor.py b/src/zvt/factors/ma/domain/stock_1d_ma_factor.py similarity index 79% rename from zvt/factors/ma/domain/stock_1d_ma_factor.py rename to src/zvt/factors/ma/domain/stock_1d_ma_factor.py index 962b30fc..2ba6ce29 100644 --- 
a/zvt/factors/ma/domain/stock_1d_ma_factor.py +++ b/src/zvt/factors/ma/domain/stock_1d_ma_factor.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- from sqlalchemy import Column, Float, String -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from zvt.contract import Mixin from zvt.contract.register import register_schema @@ -9,7 +9,7 @@ class Stock1dMaFactor(Stock1dMaFactorBase, Mixin): - __tablename__ = 'Stock1dMaFactor' + __tablename__ = "Stock1dMaFactor" level = Column(String(length=32)) code = Column(String(length=32)) @@ -32,6 +32,8 @@ class Stock1dMaFactor(Stock1dMaFactorBase, Mixin): ma250 = Column(Float) -register_schema(providers=['zvt'], db_name='stock_1d_ma_factor', schema_base=Stock1dMaFactorBase) +register_schema(providers=["zvt"], db_name="stock_1d_ma_factor", schema_base=Stock1dMaFactorBase) + + # the __all__ is generated -__all__ = ['Stock1dMaFactor'] \ No newline at end of file +__all__ = ["Stock1dMaFactor"] diff --git a/src/zvt/factors/ma/domain/stock_1d_ma_stats_factor.py b/src/zvt/factors/ma/domain/stock_1d_ma_stats_factor.py new file mode 100644 index 00000000..72f0adb7 --- /dev/null +++ b/src/zvt/factors/ma/domain/stock_1d_ma_stats_factor.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +from sqlalchemy.orm import declarative_base + +from zvt.contract.register import register_schema +from zvt.factors.ma.domain.common import MaStatsFactorCommon + +Stock1dMaStatsFactorBase = declarative_base() + + +class Stock1dMaStatsFactor(Stock1dMaStatsFactorBase, MaStatsFactorCommon): + __tablename__ = "stock_1d_ma_stats_factor" + + +register_schema(providers=["zvt"], db_name="stock_1d_ma_stats_factor", schema_base=Stock1dMaStatsFactorBase) + + +# the __all__ is generated +__all__ = ["Stock1dMaStatsFactor"] diff --git a/src/zvt/factors/ma/ma_factor.py b/src/zvt/factors/ma/ma_factor.py new file mode 100644 index 00000000..70ad1056 --- /dev/null +++ b/src/zvt/factors/ma/ma_factor.py @@ -0,0 +1,333 @@ +# -*- coding: 
utf-8 -*- +from typing import List, Union, Type + +import pandas as pd + +from zvt.contract import IntervalLevel, TradableEntity, AdjustType +from zvt.contract.api import get_schema_by_name +from zvt.contract.factor import Accumulator +from zvt.contract.factor import Transformer +from zvt.domain import Stock +from zvt.factors.algorithm import MaTransformer, MaAndVolumeTransformer +from zvt.factors.technical_factor import TechnicalFactor +from zvt.utils.time_utils import now_pd_timestamp + + +def get_ma_factor_schema(entity_type: str, level: Union[IntervalLevel, str] = IntervalLevel.LEVEL_1DAY): + if type(level) == str: + level = IntervalLevel(level) + + schema_str = "{}{}MaFactor".format(entity_type.capitalize(), level.value.capitalize()) + + return get_schema_by_name(schema_str) + + +class MaFactor(TechnicalFactor): + def __init__( + self, + entity_schema: Type[TradableEntity] = Stock, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + columns: List = None, + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: int = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = False, + adjust_type: Union[AdjustType, str] = None, + windows=None, + ) -> None: + if need_persist: + self.factor_schema = get_ma_factor_schema(entity_type=entity_schema.__name__, level=level) + + if not windows: + windows = [5, 10, 34, 55, 89, 144, 120, 250] + self.windows = windows + transformer: Transformer = MaTransformer(windows=windows) + + 
super().__init__( + entity_schema, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + transformer, + None, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + adjust_type, + ) + + +class CrossMaFactor(MaFactor): + def compute_result(self): + super().compute_result() + cols = [f"ma{window}" for window in self.windows] + s = self.factor_df[cols[0]] > self.factor_df[cols[1]] + current_col = cols[1] + for col in cols[2:]: + s = s & (self.factor_df[current_col] > self.factor_df[col]) + current_col = col + + print(self.factor_df[s]) + self.result_df = s.to_frame(name="filter_result") + + +class VolumeUpMaFactor(TechnicalFactor): + def __init__( + self, + entity_schema: Type[TradableEntity] = Stock, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + columns: List = None, + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: int = None, + accumulator: Accumulator = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = False, + adjust_type: Union[AdjustType, str] = None, + windows=None, + vol_windows=None, + turnover_threshold=300000000, + turnover_rate_threshold=0.02, + up_intervals=40, + over_mode="and", + ) -> None: + if not windows: + windows = [250] + if not vol_windows: + 
vol_windows = [30] + + self.windows = windows + self.vol_windows = vol_windows + self.turnover_threshold = turnover_threshold + self.turnover_rate_threshold = turnover_rate_threshold + self.up_intervals = up_intervals + self.over_mode = over_mode + + transformer: Transformer = MaAndVolumeTransformer(windows=windows, vol_windows=vol_windows) + + super().__init__( + entity_schema, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + transformer, + accumulator, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + adjust_type, + ) + + def compute_result(self): + super().compute_result() + + # 价格刚上均线 + cols = [f"ma{window}" for window in self.windows] + filter_up = (self.factor_df["close"] > self.factor_df[cols[0]]) & ( + self.factor_df["close"] < 1.15 * self.factor_df[cols[0]] + ) + for col in cols[1:]: + if self.over_mode == "and": + filter_up = filter_up & ( + (self.factor_df["close"] > self.factor_df[col]) + & (self.factor_df["close"] < 1.1 * self.factor_df[col]) + ) + else: + filter_up = filter_up | ( + (self.factor_df["close"] > self.factor_df[col]) + & (self.factor_df["close"] < 1.1 * self.factor_df[col]) + ) + # 放量 + if self.vol_windows: + vol_cols = [f"vol_ma{window}" for window in self.vol_windows] + filter_vol = self.factor_df["volume"] > 2 * self.factor_df[vol_cols[0]] + for col in vol_cols[1:]: + filter_vol = filter_vol & (self.factor_df["volume"] > 2 * self.factor_df[col]) + + # 成交额,换手率过滤 + filter_turnover = (self.factor_df["turnover"] > self.turnover_threshold) & ( + self.factor_df["turnover_rate"] > self.turnover_rate_threshold + ) + s = filter_up & filter_vol & filter_turnover + + # 突破后的时间周期 up_intervals + s[s == False] = None + s = s.groupby(level=0).fillna(method="ffill", limit=self.up_intervals) + s[s.isna()] = 
False + + # 还在均线附近 + # 1)刚突破 + # 2)突破后,回调到附近 + filter_result = filter_up & s & filter_turnover + + self.result_df = filter_result.to_frame(name="filter_result") + # self.result_df = self.result_df.replace(False, None) + + +class CrossMaVolumeFactor(VolumeUpMaFactor): + def __init__( + self, + entity_schema: Type[TradableEntity] = Stock, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + columns: List = None, + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: int = None, + accumulator: Accumulator = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = False, + adjust_type: Union[AdjustType, str] = None, + windows=[5, 10, 250], + vol_windows=None, + turnover_threshold=300000000, + turnover_rate_threshold=0.02, + up_intervals=40, + over_mode="and", + ) -> None: + super().__init__( + entity_schema, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + accumulator, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + adjust_type, + windows, + vol_windows, + turnover_threshold, + turnover_rate_threshold, + up_intervals, + over_mode, + ) + + def compute_result(self): + # 均线多头排列 + cols = [f"ma{window}" for window in self.windows] + filter_se = self.factor_df[cols[0]] > 
self.factor_df[cols[1]] + current_col = cols[1] + for col in cols[2:]: + filter_se = filter_se & (self.factor_df[current_col] > self.factor_df[col]) + current_col = col + + filter_se = filter_se & (self.factor_df["turnover"] > self.turnover_threshold) + self.result_df = filter_se.to_frame(name="filter_result") + # self.result_df = self.result_df.replace(False, None) + + +if __name__ == "__main__": + + factor = CrossMaVolumeFactor( + entity_provider="em", + provider="em", + entity_ids=["stock_sz_000338"], + start_timestamp="2020-01-01", + end_timestamp=now_pd_timestamp(), + need_persist=False, + ) + factor.drawer().draw(show=True) + + +# the __all__ is generated +__all__ = ["get_ma_factor_schema", "MaFactor", "CrossMaFactor", "VolumeUpMaFactor", "CrossMaVolumeFactor"] diff --git a/src/zvt/factors/ma/ma_stats_factor.py b/src/zvt/factors/ma/ma_stats_factor.py new file mode 100644 index 00000000..ad9b4f85 --- /dev/null +++ b/src/zvt/factors/ma/ma_stats_factor.py @@ -0,0 +1,223 @@ +# -*- coding: utf-8 -*- +from typing import List, Union, Type, Optional + +import pandas as pd + +from zvt.contract import IntervalLevel, TradableEntity, AdjustType +from zvt.contract.api import get_schema_by_name +from zvt.contract.factor import Accumulator +from zvt.domain import Stock +from zvt.factors.algorithm import live_or_dead +from zvt.factors.technical_factor import TechnicalFactor +from zvt.utils.pd_utils import pd_is_not_null + + +def get_ma_stats_factor_schema(entity_type: str, level: Union[IntervalLevel, str] = IntervalLevel.LEVEL_1DAY): + if type(level) == str: + level = IntervalLevel(level) + + schema_str = "{}{}MaStatsFactor".format(entity_type.capitalize(), level.value.capitalize()) + + return get_schema_by_name(schema_str) + + +class MaStatsAccumulator(Accumulator): + def __init__(self, acc_window: int = 250, windows=None, vol_windows=None) -> None: + super().__init__(acc_window) + self.windows = windows + self.vol_windows = vol_windows + + def acc_one(self, entity_id, df: 
pd.DataFrame, acc_df: pd.DataFrame, state: dict) -> (pd.DataFrame, dict): + self.logger.info(f"acc_one:{entity_id}") + if pd_is_not_null(acc_df): + df = df[df.index > acc_df.index[-1]] + if pd_is_not_null(df): + self.logger.info(f'compute from {df.iloc[0]["timestamp"]}') + acc_df = pd.concat([acc_df, df]) + else: + self.logger.info("no need to compute") + return acc_df, state + else: + acc_df = df + + for window in self.windows: + col = "ma{}".format(window) + self.indicators.append(col) + + ma_df = acc_df["close"].rolling(window=window, min_periods=window).mean() + acc_df[col] = ma_df + + acc_df["live"] = (acc_df["ma5"] > acc_df["ma10"]).apply(lambda x: live_or_dead(x)) + acc_df["distance"] = (acc_df["ma5"] - acc_df["ma10"]) / acc_df["close"] + + live = acc_df["live"] + acc_df["count"] = live * (live.groupby((live != live.shift()).cumsum()).cumcount() + 1) + + acc_df["bulk"] = (live != live.shift()).cumsum() + area_df = acc_df[["distance", "bulk"]] + acc_df["area"] = area_df.groupby("bulk").cumsum() + + for vol_window in self.vol_windows: + col = "vol_ma{}".format(vol_window) + self.indicators.append(col) + + vol_ma_df = acc_df["turnover"].rolling(window=vol_window, min_periods=vol_window).mean() + acc_df[col] = vol_ma_df + + acc_df = acc_df.set_index("timestamp", drop=False) + return acc_df, state + + +class MaStatsFactor(TechnicalFactor): + def __init__( + self, + entity_schema: Type[TradableEntity] = Stock, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: 
int = None, + need_persist: bool = True, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = False, + adjust_type: Union[AdjustType, str] = None, + windows=None, + vol_windows=None, + ) -> None: + if need_persist: + self.factor_schema = get_ma_stats_factor_schema(entity_type=entity_schema.__name__, level=level) + + if not windows: + windows = [5, 10, 34, 55, 89, 144, 120, 250] + self.windows = windows + + if not vol_windows: + vol_windows = [30] + self.vol_windows = vol_windows + + columns: List = ["id", "entity_id", "timestamp", "level", "open", "close", "high", "low", "turnover"] + + accumulator: Accumulator = MaStatsAccumulator(windows=self.windows, vol_windows=self.vol_windows) + + super().__init__( + entity_schema, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + None, + accumulator, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + adjust_type, + ) + + +class TFactor(MaStatsFactor): + def __init__( + self, + entity_schema: Type[TradableEntity] = Stock, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: int = None, + need_persist: bool = True, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + 
only_load_factor: bool = True, + adjust_type: Union[AdjustType, str] = None, + windows=None, + vol_windows=None, + ) -> None: + super().__init__( + entity_schema, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + adjust_type, + windows, + vol_windows, + ) + + def drawer_sub_df_list(self) -> Optional[List[pd.DataFrame]]: + return [self.factor_df[["area"]]] + + def drawer_factor_df_list(self) -> Optional[List[pd.DataFrame]]: + return [self.factor_df[["ma5", "ma10"]]] + + +if __name__ == "__main__": + codes = ["000338"] + + f = TFactor(codes=codes, only_load_factor=False) + + # distribute(f.factor_df[['area']],'area') + f.draw(show=True) + + +# the __all__ is generated +__all__ = ["get_ma_stats_factor_schema", "MaStatsAccumulator", "MaStatsFactor", "TFactor"] diff --git a/src/zvt/factors/ma/top_bottom_factor.py b/src/zvt/factors/ma/top_bottom_factor.py new file mode 100644 index 00000000..14c7ca72 --- /dev/null +++ b/src/zvt/factors/ma/top_bottom_factor.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +from typing import List, Union + +import pandas as pd + +from zvt.contract import AdjustType +from zvt.contract import IntervalLevel, TradableEntity +from zvt.contract.drawer import Drawer +from zvt.contract.factor import Accumulator +from zvt.contract.factor import Transformer +from zvt.contract.reader import DataReader +from zvt.domain import Stock, Stock1dKdata +from zvt.factors.technical_factor import TechnicalFactor +from zvt.utils.time_utils import now_pd_timestamp + + +class TopBottomTransformer(Transformer): + def __init__(self, window=20) -> None: + super().__init__() + self.window = window + + def transform(self, input_df) -> pd.DataFrame: + top_df = 
input_df["high"].groupby(level=0).rolling(window=self.window, min_periods=self.window).max() + top_df = top_df.reset_index(level=0, drop=True) + input_df["top"] = top_df + + bottom_df = input_df["low"].groupby(level=0).rolling(window=self.window, min_periods=self.window).min() + bottom_df = bottom_df.reset_index(level=0, drop=True) + input_df["bottom"] = bottom_df + + return input_df + + +class TopBottomFactor(TechnicalFactor): + def __init__( + self, + entity_schema: TradableEntity = Stock, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + columns: List = ["id", "entity_id", "timestamp", "level", "open", "close", "high", "low"], + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: int = None, + accumulator: Accumulator = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = False, + adjust_type: Union[AdjustType, str] = None, + window=30, + ) -> None: + + transformer = TopBottomTransformer(window=window) + + super().__init__( + entity_schema, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + transformer, + accumulator, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + adjust_type, + ) + + +if __name__ == "__main__": + factor = TopBottomFactor( + codes=["601318"], + 
start_timestamp="2005-01-01", + end_timestamp=now_pd_timestamp(), + level=IntervalLevel.LEVEL_1DAY, + window=120, + ) + print(factor.factor_df) + + data_reader1 = DataReader(data_schema=Stock1dKdata, entity_schema=Stock, codes=["601318"]) + + drawer = Drawer(main_df=data_reader1.data_df, factor_df_list=[factor.factor_df[["top", "bottom"]]]) + drawer.draw_kline(show=True) + + +# the __all__ is generated +__all__ = ["TopBottomTransformer", "TopBottomFactor"] diff --git a/zvt/factors/macd/__init__.py b/src/zvt/factors/macd/__init__.py similarity index 90% rename from zvt/factors/macd/__init__.py rename to src/zvt/factors/macd/__init__.py index f773351a..76804cb8 100644 --- a/zvt/factors/macd/__init__.py +++ b/src/zvt/factors/macd/__init__.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- + + # the __all__ is generated __all__ = [] @@ -9,4 +11,5 @@ # import all from submodule macd_factor from .macd_factor import * from .macd_factor import __all__ as _macd_factor_all -__all__ += _macd_factor_all \ No newline at end of file + +__all__ += _macd_factor_all diff --git a/src/zvt/factors/macd/macd_factor.py b/src/zvt/factors/macd/macd_factor.py new file mode 100644 index 00000000..7e12791f --- /dev/null +++ b/src/zvt/factors/macd/macd_factor.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +from typing import List, Optional + +import numpy as np +import pandas as pd + +from zvt.factors.algorithm import MacdTransformer +from zvt.factors.technical_factor import TechnicalFactor + + +class MacdFactor(TechnicalFactor): + transformer = MacdTransformer(count_live_dead=True) + + def drawer_factor_df_list(self) -> Optional[List[pd.DataFrame]]: + return None + + def drawer_sub_df_list(self) -> Optional[List[pd.DataFrame]]: + return [self.factor_df[["diff", "dea", "macd"]]] + + def drawer_sub_col_chart(self) -> Optional[dict]: + return {"diff": "line", "dea": "line", "macd": "bar"} + + +class BullFactor(MacdFactor): + def compute_result(self): + super().compute_result() + self.result_df = 
self.factor_df["bull"].to_frame(name="filter_result") + + +class KeepBullFactor(BullFactor): + keep_window = 10 + + def compute_result(self): + super().compute_result() + df = ( + self.result_df["filter_result"] + .groupby(level=0) + .rolling(window=self.keep_window, min_periods=self.keep_window) + .apply(lambda x: np.logical_and.reduce(x)) + ) + df = df.reset_index(level=0, drop=True) + self.result_df["filter_result"] = df + + +# 金叉 死叉 持续时间 切换点 +class LiveOrDeadFactor(MacdFactor): + pattern = [-5, 1] + + def compute_result(self): + super().compute_result() + self.factor_df["pre"] = self.factor_df["live_count"].shift() + s = (self.factor_df["pre"] <= self.pattern[0]) & (self.factor_df["live_count"] >= self.pattern[1]) + self.result_df = s.to_frame(name="filter_result") + + +class GoldCrossFactor(MacdFactor): + def compute_result(self): + super().compute_result() + s = self.factor_df["live"] == 1 + self.result_df = s.to_frame(name="filter_result") + + +if __name__ == "__main__": + f = GoldCrossFactor(provider="em", entity_provider="em", entity_ids=["stock_sz_000338"]) + f.drawer().draw(show=True) + + +# the __all__ is generated +__all__ = ["MacdFactor", "BullFactor", "KeepBullFactor", "LiveOrDeadFactor", "GoldCrossFactor"] diff --git a/src/zvt/factors/shape.py b/src/zvt/factors/shape.py new file mode 100644 index 00000000..2687c1fc --- /dev/null +++ b/src/zvt/factors/shape.py @@ -0,0 +1,258 @@ +# -*- coding: utf-8 -*- +import json +import logging +from enum import Enum +from typing import List + +import pandas as pd + +from zvt.contract.data_type import Bean +from zvt.contract.drawer import Rect +from zvt.factors.algorithm import intersect +from zvt.utils.time_utils import TIME_FORMAT_ISO8601, to_time_str + +logger = logging.getLogger(__name__) + + +class Direction(Enum): + up = "up" + down = "down" + + def opposite(self): + if self == Direction.up: + return Direction.down + if self == Direction.down: + return Direction.up + + +class Fenxing(Bean): + def 
__init__(self, state, kdata, index) -> None: + self.state = state + self.kdata = kdata + self.index = index + + +def fenxing_power(left, middle, right, fenxing="tmp_ding"): + if fenxing == "tmp_ding": + a = middle["high"] - middle["close"] + b = middle["high"] - left["high"] + c = middle["high"] - right["high"] + return -(a + b + c) / middle["close"] + if fenxing == "tmp_di": + a = abs(middle["low"] - middle["close"]) + b = abs(middle["low"] - left["low"]) + c = abs(middle["low"] - right["low"]) + return (a + b + c) / middle["close"] + + +def a_include_b(a: pd.Series, b: pd.Series) -> bool: + """ + kdata a includes kdata b + + :param a: + :param b: + :return: + """ + return (a["high"] >= b["high"]) and (a["low"] <= b["low"]) + + +def get_direction(kdata, pre_kdata, current=Direction.up) -> Direction: + if is_up(kdata, pre_kdata): + return Direction.up + if is_down(kdata, pre_kdata): + return Direction.down + + return current + + +def is_up(kdata, pre_kdata): + return kdata["high"] > pre_kdata["high"] + + +def is_down(kdata, pre_kdata): + return kdata["low"] < pre_kdata["low"] + + +def handle_first_fenxing(one_df, step=11): + if step >= len(one_df): + logger.info(f"coult not get fenxing by step {step}, len {len(one_df)}") + return None, None, None, None + + logger.info(f"try to get first fenxing by step {step}") + + df = one_df.iloc[:step] + ding_kdata = df[df["high"].max() == df["high"]] + ding_index = int(ding_kdata.index[-1]) + + di_kdata = df[df["low"].min() == df["low"]] + di_index = int(di_kdata.index[-1]) + + # 确定第一个分型 + if abs(ding_index - di_index) >= 4: + if ding_index > di_index: + fenxing = "bi_di" + fenxing_index = di_index + one_df.loc[di_index, "bi_di"] = True + # 确定第一个分型后,开始遍历的位置 + start_index = ding_index + # 目前的笔的方向,up代表寻找 can_ding;down代表寻找can_di + direction = Direction.up + interval = ding_index - di_index + else: + fenxing = "bi_ding" + fenxing_index = ding_index + one_df.loc[ding_index, "bi_ding"] = True + start_index = di_index + direction = 
Direction.down + interval = di_index - ding_index + return ( + Fenxing( + state=fenxing, + index=fenxing_index, + kdata={ + "low": float(one_df.loc[fenxing_index]["low"]), + "high": float(one_df.loc[fenxing_index]["high"]), + }, + ), + start_index, + direction, + interval, + ) + else: + logger.info("need add step") + return handle_first_fenxing(one_df, step=step + 1) + + +def handle_zhongshu(points: list, acc_df, end_index, zhongshu_col="zhongshu", zhongshu_change_col="zhongshu_change"): + zhongshu = None + zhongshu_change = None + interval = None + + if len(points) == 4: + x1 = points[0][0] + x2 = points[3][0] + + interval = points[3][2] - points[0][2] + + if points[0][1] < points[1][1]: + # 向下段 + range = intersect((points[0][1], points[1][1]), (points[2][1], points[3][1])) + if range: + y1, y2 = range + # 记录中枢 + zhongshu = Rect(x0=x1, x1=x2, y0=y1, y1=y2) + zhongshu_change = abs(y1 - y2) / y1 + acc_df.loc[end_index, zhongshu_col] = zhongshu + acc_df.loc[end_index, zhongshu_change_col] = zhongshu_change + points = points[-1:] + else: + points = points[1:] + else: + # 向上段 + range = intersect((points[1][1], points[0][1]), (points[3][1], points[2][1])) + if range: + y1, y2 = range + # 记录中枢 + zhongshu = Rect(x0=x1, x1=x2, y0=y1, y1=y2) + zhongshu_change = abs(y1 - y2) / y1 + + acc_df.loc[end_index, zhongshu_col] = zhongshu + acc_df.loc[end_index, zhongshu_change_col] = zhongshu_change + points = points[-1:] + else: + points = points[1:] + return points, zhongshu, zhongshu_change, interval + + +def handle_duan(fenxing_list: List[Fenxing], pre_duan_state="yi"): + state = fenxing_list[0].state + # 1笔区间 + bi1_start = fenxing_list[0].kdata + bi1_end = fenxing_list[1].kdata + # 3笔区间 + bi3_start = fenxing_list[2].kdata + bi3_end = fenxing_list[3].kdata + + if state == "bi_ding": + # 向下段,下-上-下 + + # 第一笔区间 + range1 = (bi1_end["low"], bi1_start["high"]) + # 第三笔区间 + range3 = (bi3_end["low"], bi3_start["high"]) + + # 1,3有重叠,认为第一个段出现 + if intersect(range1, range3): + return "down" 
+ + else: + # 向上段,上-下-上 + + # 第一笔区间 + range1 = (bi1_start["low"], bi1_end["high"]) + # 第三笔区间 + range3 = (bi3_start["low"], bi3_end["high"]) + + # 1,3有重叠,认为第一个段出现 + if intersect(range1, range3): + return "up" + + return pre_duan_state + + +def handle_including(one_df, index, kdata, pre_index, pre_kdata, tmp_direction: Direction): + # 改kdata + if a_include_b(kdata, pre_kdata): + # 长的kdata变短 + if tmp_direction == Direction.up: + one_df.loc[index, "low"] = pre_kdata["low"] + else: + one_df.loc[index, "high"] = pre_kdata["high"] + # 改pre_kdata + elif a_include_b(pre_kdata, kdata): + # 长的pre_kdata变短 + if tmp_direction == Direction.down: + one_df.loc[pre_index, "low"] = kdata["low"] + else: + one_df.loc[pre_index, "high"] = kdata["high"] + + +class FactorStateEncoder(json.JSONEncoder): + def default(self, object): + if isinstance(object, pd.Series): + return object.to_dict() + elif isinstance(object, pd.Timestamp): + return to_time_str(object, fmt=TIME_FORMAT_ISO8601) + elif isinstance(object, Enum): + return object.value + elif isinstance(object, Bean): + return object.dict() + else: + return super().default(object) + + +def decode_rect(dct): + return Rect(x0=dct["x0"], y0=dct["y0"], x1=dct["x1"], y1=dct["y1"]) + + +def decode_fenxing(dct): + return Fenxing(state=dct["state"], kdata=dct["kdata"], index=dct["index"]) + + +# the __all__ is generated +__all__ = [ + "Direction", + "Fenxing", + "fenxing_power", + "a_include_b", + "get_direction", + "is_up", + "is_down", + "handle_first_fenxing", + "handle_zhongshu", + "handle_duan", + "handle_including", + "FactorStateEncoder", + "decode_rect", + "decode_fenxing", +] diff --git a/src/zvt/factors/target_selector.py b/src/zvt/factors/target_selector.py new file mode 100644 index 00000000..75f2773c --- /dev/null +++ b/src/zvt/factors/target_selector.py @@ -0,0 +1,218 @@ +import operator +from enum import Enum +from itertools import accumulate +from typing import List, Optional + +import pandas as pd +from pandas import DataFrame 
+ +from zvt.contract import IntervalLevel +from zvt.contract.factor import Factor +from zvt.domain.meta.stock_meta import Stock +from zvt.utils.pd_utils import index_df, pd_is_not_null, is_filter_result_df, is_score_result_df +from zvt.utils.time_utils import to_pd_timestamp, now_pd_timestamp + + +class TradeType(Enum): + # open_long 代表开多,并应该平掉相应标的的空单 + open_long = "open_long" + # open_short 代表开空,并应该平掉相应标的的多单 + open_short = "open_short" + # keep 代表保持现状,跟主动开仓有区别,有时有仓位是可以保持的,但不适合开新的仓 + keep = "keep" + + +class SelectMode(Enum): + condition_and = "condition_and" + condition_or = "condition_or" + + +class TargetSelector(object): + def __init__( + self, + entity_ids=None, + entity_schema=Stock, + exchanges=None, + codes=None, + start_timestamp=None, + end_timestamp=None, + long_threshold=0.8, + short_threshold=0.2, + level=IntervalLevel.LEVEL_1DAY, + provider=None, + select_mode: SelectMode = SelectMode.condition_and, + ) -> None: + self.entity_ids = entity_ids + self.entity_schema = entity_schema + self.exchanges = exchanges + self.codes = codes + self.provider = provider + self.select_mode = select_mode + + if start_timestamp: + self.start_timestamp = to_pd_timestamp(start_timestamp) + if end_timestamp: + self.end_timestamp = to_pd_timestamp(end_timestamp) + else: + self.end_timestamp = now_pd_timestamp() + + self.long_threshold = long_threshold + self.short_threshold = short_threshold + self.level = level + + self.factors: List[Factor] = [] + self.filter_result = None + self.score_result = None + + self.open_long_df: Optional[DataFrame] = None + self.open_short_df: Optional[DataFrame] = None + self.keep_df: Optional[DataFrame] = None + + self.init_factors( + entity_ids=entity_ids, + entity_schema=entity_schema, + exchanges=exchanges, + codes=codes, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + level=self.level, + ) + + def init_factors(self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, level): + pass + + def 
add_factor(self, factor: Factor): + self.check_factor(factor) + self.factors.append(factor) + return self + + def check_factor(self, factor: Factor): + assert factor.level == self.level + + def move_on(self, to_timestamp=None, kdata_use_begin_time=False, timeout=20): + if self.factors: + for factor in self.factors: + factor.move_on(to_timestamp, timeout=timeout) + + self.run() + + def run(self): + """ """ + if self.factors: + filters = [] + scores = [] + for factor in self.factors: + if is_filter_result_df(factor.result_df): + df = factor.result_df[["filter_result"]] + if pd_is_not_null(df): + df.columns = ["score"] + filters.append(df) + else: + raise Exception("no data for factor:{},{}".format(factor.name, factor)) + if is_score_result_df(factor.result_df): + df = factor.result_df[["score_result"]] + if pd_is_not_null(df): + df.columns = ["score"] + scores.append(df) + else: + raise Exception("no data for factor:{},{}".format(factor.name, factor)) + + if filters: + if self.select_mode == SelectMode.condition_and: + self.filter_result = list(accumulate(filters, func=operator.__and__))[-1] + else: + self.filter_result = list(accumulate(filters, func=operator.__or__))[-1] + + if scores: + self.score_result = list(accumulate(scores, func=operator.__add__))[-1] / len(scores) + + self.generate_targets() + + def get_targets(self, timestamp, trade_type: TradeType = TradeType.open_long) -> List[str]: + if trade_type == TradeType.open_long: + df = self.open_long_df + elif trade_type == TradeType.open_short: + df = self.open_short_df + elif trade_type == TradeType.keep: + df = self.keep_df + else: + assert False + + if pd_is_not_null(df): + if timestamp in df.index: + target_df = df.loc[[to_pd_timestamp(timestamp)], :] + return target_df["entity_id"].tolist() + return [] + + def get_targets_between( + self, start_timestamp, end_timestamp, trade_type: TradeType = TradeType.open_long + ) -> List[str]: + if trade_type == TradeType.open_long: + df = self.open_long_df + elif 
trade_type == TradeType.open_short: + df = self.open_short_df + elif trade_type == TradeType.keep: + df = self.keep_df + else: + assert False + + if pd_is_not_null(df): + index = pd.date_range(start_timestamp, end_timestamp, freq=self.level.to_pd_freq()) + return list(set(df.loc[df.index & index]["entity_id"].tolist())) + return [] + + def get_open_long_targets(self, timestamp): + return self.get_targets(timestamp=timestamp, trade_type=TradeType.open_long) + + def get_open_short_targets(self, timestamp): + return self.get_targets(timestamp=timestamp, trade_type=TradeType.open_short) + + # overwrite it to generate targets + def generate_targets(self): + keep_result = pd.DataFrame() + long_result = pd.DataFrame() + short_result = pd.DataFrame() + + if pd_is_not_null(self.filter_result): + keep_result = self.filter_result[self.filter_result["score"].isna()] + long_result = self.filter_result[self.filter_result["score"] == True] + short_result = self.filter_result[self.filter_result["score"] == False] + + if pd_is_not_null(self.score_result): + score_keep_result = self.score_result[ + (self.score_result["score"] > self.short_threshold) & (self.score_result["score"] < self.long_threshold) + ] + if pd_is_not_null(keep_result): + keep_result = score_keep_result.loc[keep_result.index, :] + else: + keep_result = score_keep_result + + score_long_result = self.score_result[self.score_result["score"] >= self.long_threshold] + if pd_is_not_null(long_result): + long_result = score_long_result.loc[long_result.index, :] + else: + long_result = score_long_result + + score_short_result = self.score_result[self.score_result["score"] <= self.short_threshold] + if pd_is_not_null(short_result): + short_result = score_short_result.loc[short_result.index, :] + else: + short_result = score_short_result + + self.keep_df = self.normalize_result_df(keep_result) + self.open_long_df = self.normalize_result_df(long_result) + self.open_short_df = self.normalize_result_df(short_result) + + def 
get_result_df(self): + return self.open_long_df + + def normalize_result_df(self, df): + if pd_is_not_null(df): + df = df.reset_index() + df = index_df(df) + df = df.sort_values(by=["score", "entity_id"]) + return df + + +# the __all__ is generated +__all__ = ["TradeType", "SelectMode", "TargetSelector"] diff --git a/src/zvt/factors/technical_factor.py b/src/zvt/factors/technical_factor.py new file mode 100644 index 00000000..d0cc1996 --- /dev/null +++ b/src/zvt/factors/technical_factor.py @@ -0,0 +1,105 @@ +from typing import List, Union, Type, Optional + +import pandas as pd + +from zvt.api.kdata import get_kdata_schema, default_adjust_type +from zvt.contract import IntervalLevel, TradableEntity, AdjustType +from zvt.contract.factor import Factor, Transformer, Accumulator, FactorMeta +from zvt.domain import Stock + + +class TechnicalFactor(Factor, metaclass=FactorMeta): + def __init__( + self, + entity_schema: Type[TradableEntity] = Stock, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + columns: List = None, + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: int = None, + transformer: Transformer = None, + accumulator: Accumulator = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = False, + adjust_type: Union[AdjustType, str] = None, + ) -> None: + if columns is None: + columns = [ + "id", + "entity_id", + "timestamp", + "level", + "open", + "close", + "high", + "low", + "volume", + "turnover", + 
"turnover_rate", + ] + + # 股票默认使用后复权 + if not adjust_type: + adjust_type = default_adjust_type(entity_type=entity_schema.__name__) + + self.adjust_type = adjust_type + self.data_schema = get_kdata_schema(entity_schema.__name__, level=level, adjust_type=adjust_type) + + if not factor_name: + if type(level) == str: + factor_name = f"{type(self).__name__.lower()}_{level}" + else: + factor_name = f"{type(self).__name__.lower()}_{level.value}" + + super().__init__( + self.data_schema, + entity_schema, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + transformer, + accumulator, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + ) + + def drawer_sub_df_list(self) -> Optional[List[pd.DataFrame]]: + return [self.factor_df[["volume"]]] + + +# the __all__ is generated +__all__ = ["TechnicalFactor"] diff --git a/src/zvt/factors/top_stocks.py b/src/zvt/factors/top_stocks.py new file mode 100644 index 00000000..cb990721 --- /dev/null +++ b/src/zvt/factors/top_stocks.py @@ -0,0 +1,289 @@ +# -*- coding: utf-8 -*- +import json +from typing import List + +from sqlalchemy import Column, String, Integer +from sqlalchemy.orm import declarative_base + +from zvt.api.kdata import get_trade_dates +from zvt.api.selector import ( + get_entity_ids_by_filter, + get_limit_up_stocks, + get_mini_and_small_stock, + get_middle_and_big_stock, +) +from zvt.api.stats import get_top_performance_entities_by_periods, TopType +from zvt.contract import Mixin, AdjustType +from zvt.contract.api import get_db_session +from zvt.contract.factor import TargetType +from zvt.contract.register import register_schema +from zvt.domain import Stock, Stock1dHfqKdata +from zvt.factors.ma.ma_factor import VolumeUpMaFactor +from zvt.utils.time_utils import ( + 
date_time_by_interval, + to_time_str, + TIME_FORMAT_DAY, + today, + count_interval, + to_pd_timestamp, +) + +TopStocksBase = declarative_base() + + +class TopStocks(TopStocksBase, Mixin): + __tablename__ = "top_stocks" + + short_count = Column(Integer) + short_stocks = Column(String(length=2048)) + + long_count = Column(Integer) + long_stocks = Column(String(length=2048)) + + small_vol_up_count = Column(Integer) + small_vol_up_stocks = Column(String(length=2048)) + + big_vol_up_count = Column(Integer) + big_vol_up_stocks = Column(String(length=2048)) + + all_stocks_count = Column(Integer) + + +register_schema(providers=["zvt"], db_name="top_stocks", schema_base=TopStocksBase) + + +def get_vol_up_stocks(target_date, provider="em", stock_type="small", entity_ids=None): + if stock_type == "small": + current_entity_pool = get_mini_and_small_stock(timestamp=target_date, provider=provider) + turnover_threshold = 300000000 + turnover_rate_threshold = 0.02 + elif stock_type == "big": + current_entity_pool = get_middle_and_big_stock(timestamp=target_date, provider=provider) + turnover_threshold = 300000000 + turnover_rate_threshold = 0.01 + else: + assert False + + if entity_ids: + current_entity_pool = set(current_entity_pool) & set(entity_ids) + + kdata_schema = Stock1dHfqKdata + filters = [ + kdata_schema.timestamp == to_pd_timestamp(target_date), + kdata_schema.turnover >= turnover_threshold, + kdata_schema.turnover_rate >= turnover_rate_threshold, + ] + kdata_df = kdata_schema.query_data( + provider=provider, filters=filters, columns=["entity_id", "timestamp"], index="entity_id" + ) + if current_entity_pool: + current_entity_pool = set(current_entity_pool) & set(kdata_df.index.tolist()) + else: + current_entity_pool = kdata_df.index.tolist() + + factor = VolumeUpMaFactor( + entity_schema=Stock, + entity_provider=provider, + provider=provider, + entity_ids=current_entity_pool, + start_timestamp=date_time_by_interval(target_date, -600), + end_timestamp=target_date, + 
adjust_type=AdjustType.hfq, + windows=[120, 250], + over_mode="or", + up_intervals=60, + turnover_threshold=turnover_threshold, + turnover_rate_threshold=turnover_rate_threshold, + ) + + stocks = factor.get_targets(timestamp=target_date, target_type=TargetType.positive) + return stocks + + +def update_with_limit_up(): + session = get_db_session(provider="zvt", data_schema=TopStocks) + + top_stocks: List[TopStocks] = TopStocks.query_data( + end_timestamp="2021-07-18", return_type="domain", session=session + ) + for top_stock in top_stocks: + limit_up_stocks = get_limit_up_stocks(timestamp=top_stock.timestamp) + short_stocks = json.loads(top_stock.short_stocks) + stock_list = list(set(short_stocks + limit_up_stocks)) + top_stock.short_stocks = json.dumps(stock_list, ensure_ascii=False) + top_stock.short_count = len(stock_list) + session.add_all(top_stocks) + session.commit() + + +def update_vol_up(): + session = get_db_session(provider="zvt", data_schema=TopStocks) + + top_stocks: List[TopStocks] = TopStocks.query_data( + return_type="domain", start_timestamp="2019-03-27", session=session + ) + for top_stock in top_stocks: + target_date = top_stock.timestamp + count_bj = count_interval("2023-09-01", target_date) + ignore_bj = count_bj < 0 + + entity_ids = get_entity_ids_by_filter( + target_date=target_date, + provider="em", + ignore_delist=False, + ignore_st=False, + ignore_new_stock=False, + ignore_bj=ignore_bj, + ) + small_vol_up_stocks = get_vol_up_stocks( + target_date=target_date, provider="em", stock_type="small", entity_ids=entity_ids + ) + top_stock.small_vol_up_count = len(small_vol_up_stocks) + top_stock.small_vol_up_stocks = json.dumps(small_vol_up_stocks, ensure_ascii=False) + + big_vol_up_stocks = get_vol_up_stocks( + target_date=target_date, provider="em", stock_type="big", entity_ids=entity_ids + ) + top_stock.big_vol_up_count = len(big_vol_up_stocks) + top_stock.big_vol_up_stocks = json.dumps(big_vol_up_stocks, ensure_ascii=False) + 
session.add(top_stock) + session.commit() + print(f"finish {target_date}") + + +def compute_top_stocks(provider="em", start="2024-01-01"): + latest = TopStocks.query_data(limit=1, order=TopStocks.timestamp.desc(), return_type="domain") + if latest: + start = date_time_by_interval(to_time_str(latest[0].timestamp, fmt=TIME_FORMAT_DAY)) + + trade_days = get_trade_dates(start=start, end=today()) + + for target_date in trade_days: + print(f"to {target_date}") + session = get_db_session(provider="zvt", data_schema=TopStocks) + top_stocks = TopStocks( + id=f"block_zvt_000001_{target_date}", entity_id="block_zvt_000001", timestamp=target_date + ) + + count_bj = count_interval("2023-09-01", target_date) + ignore_bj = count_bj < 0 + + entity_ids = get_entity_ids_by_filter( + target_date=target_date, + provider=provider, + ignore_delist=False, + ignore_st=False, + ignore_new_stock=False, + ignore_bj=ignore_bj, + ) + + short_selected, short_period = get_top_performance_entities_by_periods( + entity_provider=provider, + data_provider=provider, + target_date=target_date, + periods=[*range(1, 20)], + ignore_new_stock=False, + ignore_st=False, + entity_ids=entity_ids, + entity_type="stock", + adjust_type=None, + top_count=30, + turnover_threshold=0, + turnover_rate_threshold=0, + return_type=TopType.positive, + ) + limit_up_stocks = get_limit_up_stocks(timestamp=target_date) + short_selected = list(set(short_selected + limit_up_stocks)) + top_stocks.short_count = len(short_selected) + top_stocks.short_stocks = json.dumps(short_selected, ensure_ascii=False) + + long_period_start = short_period + 1 + long_selected, long_period = get_top_performance_entities_by_periods( + entity_provider=provider, + data_provider=provider, + target_date=target_date, + periods=[*range(long_period_start, long_period_start + 30)], + ignore_new_stock=False, + ignore_st=False, + entity_ids=entity_ids, + entity_type="stock", + adjust_type=None, + top_count=30, + turnover_threshold=0, + 
turnover_rate_threshold=0, + return_type=TopType.positive, + ) + top_stocks.long_count = len(long_selected) + top_stocks.long_stocks = json.dumps(long_selected, ensure_ascii=False) + + small_vol_up_stocks = get_vol_up_stocks( + target_date=target_date, provider=provider, stock_type="small", entity_ids=entity_ids + ) + top_stocks.small_vol_up_count = len(small_vol_up_stocks) + top_stocks.small_vol_up_stocks = json.dumps(small_vol_up_stocks, ensure_ascii=False) + + big_vol_up_stocks = get_vol_up_stocks( + target_date=target_date, provider=provider, stock_type="big", entity_ids=entity_ids + ) + top_stocks.big_vol_up_count = len(big_vol_up_stocks) + top_stocks.big_vol_up_stocks = json.dumps(big_vol_up_stocks, ensure_ascii=False) + + top_stocks.all_stocks_count = len(entity_ids) + + print(top_stocks) + session.add(top_stocks) + session.commit() + + +def get_top_stocks(target_date, return_type="short"): + datas: List[TopStocks] = TopStocks.query_data( + start_timestamp=target_date, end_timestamp=target_date, return_type="domain" + ) + stocks = [] + if datas: + assert len(datas) == 1 + top_stock = datas[0] + if return_type == "all": + short_stocks = json.loads(top_stock.short_stocks) + long_stocks = json.loads(top_stock.long_stocks) + small_vol_up_stocks = json.loads(top_stock.small_vol_up_stocks) + big_vol_up_stocks = json.loads(top_stock.big_vol_up_stocks) + all_stocks = list(set(short_stocks + long_stocks + small_vol_up_stocks + big_vol_up_stocks)) + return all_stocks + elif return_type == "short": + stocks = json.loads(top_stock.short_stocks) + elif return_type == "long": + stocks = json.loads(top_stock.long_stocks) + elif return_type == "small_vol_up": + stocks = json.loads(top_stock.small_vol_up_stocks) + elif return_type == "big_vol_up": + stocks = json.loads(top_stock.big_vol_up_stocks) + else: + assert False + return stocks + + +if __name__ == "__main__": + compute_top_stocks() + # update_with_limit_up() + # update_vol_up() + # target_date = "2024-03-06" + # 
stocks = get_top_stocks(target_date=target_date, return_type="short") + # print(stocks) + # stocks = get_top_stocks(target_date=target_date, return_type="long") + # print(stocks) + # stocks = get_top_stocks(target_date=target_date, return_type="small_vol_up") + # print(stocks) + # stocks = get_top_stocks(target_date=target_date, return_type="big_vol_up") + # print(stocks) + + +# the __all__ is generated +__all__ = [ + "TopStocks", + "get_vol_up_stocks", + "update_with_limit_up", + "update_vol_up", + "compute_top_stocks", + "get_top_stocks", +] diff --git a/src/zvt/factors/transformers.py b/src/zvt/factors/transformers.py new file mode 100644 index 00000000..a48a9ceb --- /dev/null +++ b/src/zvt/factors/transformers.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +import numpy as np +import pandas as pd + +from zvt.contract.factor import Transformer +from zvt.factors.algorithm import MaTransformer +from zvt.factors.technical_factor import TechnicalFactor +from zvt.utils.pd_utils import group_by_entity_id, normalize_group_compute_result, merge_filter_result +from zvt.utils.time_utils import to_pd_timestamp + + +def _cal_state(s, df, pre, interval, col): + assert len(s) == pre + interval + s = df.loc[s.index, :] + pre_df: pd.DataFrame = s.iloc[:pre, :] + recent_df: pd.DataFrame = s.iloc[-interval:, :] + if pre_df.isnull().values.any() or recent_df.isnull().values.any(): + return np.nan + pre_result = np.logical_and.reduce(pre_df["close"] > pre_df[col]) + recent_result = np.logical_and.reduce(recent_df["close"] < recent_df[col]) + if pre_result and recent_result: + return True + return np.nan + + +class CrossMaTransformer(MaTransformer): + def __init__(self, windows=None, cal_change_pct=False) -> None: + super().__init__(windows, cal_change_pct) + + def transform(self, input_df: pd.DataFrame) -> pd.DataFrame: + input_df = super().transform(input_df) + cols = [f"ma{window}" for window in self.windows] + s = input_df[cols[0]] > input_df[cols[1]] + current_col = cols[1] + for 
col in cols[2:]: + s = s & (input_df[current_col] > input_df[col]) + current_col = col + input_df["filter_result"] = s + return input_df + + +class SpecificTransformer(Transformer): + def __init__(self, buy_timestamp, sell_timestamp) -> None: + self.buy_timestamp = to_pd_timestamp(buy_timestamp) + self.sell_timestamp = to_pd_timestamp(sell_timestamp) + + def transform(self, input_df: pd.DataFrame) -> pd.DataFrame: + s = input_df[input_df.get_level_values["timestamp"] == self.buy_timestamp] + s[s == False] = None + s[input_df.get_level_values["timestamp"] == self.sell_timestamp] = False + input_df["filter_result"] = s + return input_df + + +class FallBelowTransformer(Transformer): + def __init__(self, window=10, interval=3) -> None: + super().__init__() + self.window = window + self.interval = interval + + def transform(self, input_df: pd.DataFrame) -> pd.DataFrame: + col = f"ma{self.window}" + if col not in input_df.columns: + group_result = ( + group_by_entity_id(input_df["close"]).rolling(window=self.window, min_periods=self.window).mean() + ) + group_result = normalize_group_compute_result(group_result=group_result) + input_df[col] = group_result + + # 连续3(interval)日收在10(window)日线下 + s = input_df["close"] < input_df[col] + s = ( + group_by_entity_id(s) + .rolling(window=self.interval, min_periods=self.interval) + .apply(lambda x: np.logical_and.reduce(x)) + ) + s = normalize_group_compute_result(group_result=s) + # 构造卖点 + s[s == False] = None + s[s == True] = False + input_df = merge_filter_result(input_df=input_df, filter_result=s) + + return input_df + + +if __name__ == "__main__": + # df = Stock1dHfqKdata.query_data(codes=["000338"], index=["entity_id", "timestamp"]) + # df = FallBelowTransformer().transform(df) + # print(df["filter_result"]) + TechnicalFactor(transformer=SpecificTransformer(timestamp="2020-03-01")) + + +# the __all__ is generated +__all__ = ["CrossMaTransformer", "SpecificTransformer", "FallBelowTransformer"] diff --git 
a/zvt/factors/zen/__init__.py b/src/zvt/factors/zen/__init__.py similarity index 67% rename from zvt/factors/zen/__init__.py rename to src/zvt/factors/zen/__init__.py index 3940b30c..069806a5 100644 --- a/zvt/factors/zen/__init__.py +++ b/src/zvt/factors/zen/__init__.py @@ -1,4 +1,5 @@ -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*-# + # the __all__ is generated __all__ = [] @@ -9,9 +10,17 @@ # import all from submodule zen_factor from .zen_factor import * from .zen_factor import __all__ as _zen_factor_all + __all__ += _zen_factor_all +# import all from submodule base_factor +from .base_factor import * +from .base_factor import __all__ as _base_factor_all + +__all__ += _base_factor_all + # import all from submodule domain from .domain import * from .domain import __all__ as _domain_all -__all__ += _domain_all \ No newline at end of file + +__all__ += _domain_all diff --git a/src/zvt/factors/zen/base_factor.py b/src/zvt/factors/zen/base_factor.py new file mode 100644 index 00000000..a03fd8a3 --- /dev/null +++ b/src/zvt/factors/zen/base_factor.py @@ -0,0 +1,729 @@ +# -*- coding: utf-8 -*- +import json +import logging +from enum import Enum +from typing import List +from typing import Union, Optional, Type + +import numpy as np +import pandas as pd + +from zvt.contract import IntervalLevel, AdjustType +from zvt.contract import TradableEntity +from zvt.contract.api import get_schema_by_name +from zvt.contract.data_type import Bean +from zvt.contract.drawer import Rect +from zvt.contract.factor import Accumulator +from zvt.contract.factor import Transformer +from zvt.domain import Stock, Index, Index1dKdata +from zvt.factors.algorithm import intersect, combine +from zvt.factors.shape import ( + Fenxing, + Direction, + handle_first_fenxing, + decode_rect, + get_direction, + handle_including, + fenxing_power, + handle_duan, +) +from zvt.factors.technical_factor import TechnicalFactor +from zvt.utils.decorator import to_string +from zvt.utils.pd_utils import 
pd_is_not_null +from zvt.utils.time_utils import TIME_FORMAT_ISO8601, to_time_str + +logger = logging.getLogger(__name__) + + +class FactorStateEncoder(json.JSONEncoder): + def default(self, object): + if isinstance(object, pd.Series): + return object.to_dict() + elif isinstance(object, pd.Timestamp): + return to_time_str(object, fmt=TIME_FORMAT_ISO8601) + elif isinstance(object, Enum): + return object.value + elif isinstance(object, Bean): + return object.dict() + else: + return super().default(object) + + +def get_zen_factor_schema(entity_type: str, level: Union[IntervalLevel, str] = IntervalLevel.LEVEL_1DAY): + if type(level) == str: + level = IntervalLevel(level) + + # z factor schema rule + # 1)name:{SecurityType.value.capitalize()}{IntervalLevel.value.upper()}ZFactor + schema_str = "{}{}ZenFactor".format(entity_type.capitalize(), level.value.capitalize()) + + return get_schema_by_name(schema_str) + + +@to_string +class ZenState(Bean): + def __init__(self, state: dict = None) -> None: + super().__init__() + + if not state: + state = dict() + + # 用于计算未完成段的分型 + self.fenxing_list = state.get("fenxing_list", []) + fenxing_list = [Fenxing(item["state"], item["kdata"], item["index"]) for item in self.fenxing_list] + self.fenxing_list = fenxing_list + + # 目前的方向 + if state.get("direction"): + self.direction = Direction(state.get("direction")) + else: + self.direction = None + + # 候选分型(candidate) + self.can_fenxing = state.get("can_fenxing") + self.can_fenxing_index = state.get("can_fenxing_index") + # 反方向count + self.opposite_count = state.get("opposite_count", 0) + # 目前段的方向 + self.current_duan_state = state.get("current_duan_state", "yi") + + # 记录用于计算中枢的段 + # list of (timestamp,value) + self.duans = state.get("duans", []) + self.bis = state.get("bis", []) + + # 前一个点 + self.pre_bi = state.get("pre_bi") + self.pre_duan = state.get("pre_duan") + + # 目前的merge_zhongshu + self.merge_zhongshu = state.get("merge_zhongshu") + self.merge_zhongshu_level = 
state.get("merge_zhongshu_level") + self.merge_zhongshu_interval = state.get("merge_zhongshu_interval") + + +def handle_zhongshu( + points: list, + acc_df, + end_index, + zhongshu_col="zhongshu", + zhongshu_change_col="zhongshu_change", +): + zhongshu = None + zhongshu_change = None + interval = None + + if len(points) == 4: + x1 = points[0][0] + x2 = points[3][0] + + interval = points[3][2] - points[0][2] + + if points[0][1] < points[1][1]: + # 向下段 + range = intersect((points[0][1], points[1][1]), (points[2][1], points[3][1])) + if range: + y1, y2 = range + # 记录中枢 + zhongshu = Rect(x0=x1, x1=x2, y0=y1, y1=y2) + zhongshu_change = abs(y1 - y2) / abs(y1) + acc_df.loc[end_index, zhongshu_col] = zhongshu + acc_df.loc[end_index, zhongshu_change_col] = zhongshu_change + points = points[-1:] + else: + points = points[1:] + else: + # 向上段 + range = intersect((points[1][1], points[0][1]), (points[3][1], points[2][1])) + if range: + y1, y2 = range + # 记录中枢 + zhongshu = Rect(x0=x1, x1=x2, y0=y1, y1=y2) + zhongshu_change = abs(y1 - y2) / abs(y1) + + acc_df.loc[end_index, zhongshu_col] = zhongshu + acc_df.loc[end_index, zhongshu_change_col] = zhongshu_change + points = points[-1:] + else: + points = points[1:] + return points, zhongshu, zhongshu_change, interval + + +class ZenAccumulator(Accumulator): + def __init__(self, acc_window: int = 1) -> None: + """ + 算法和概念 + <实体> 某种状态的k线 + [实体] 连续实体排列 + + 两k线的关系有三种: 上涨,下跌,包含 + 上涨: k线高点比之前高,低点比之前高 + 下跌: k线低点比之前低,高点比之前低 + 包含: k线高点比之前高,低点比之前低;反方向,即被包含 + 处理包含关系,长的k线缩短,上涨时,低点取max(low1,low2);下跌时,高点取min(high1,high2) + + 第一个顶(底)分型: 出现连续4根下跌(上涨)k线 + 之后开始寻找 候选底(顶)分型,寻找的过程中有以下状态 + + <临时顶>: 中间k线比两边的高点高,是一条特定的k线 + <临时底>: 中间k线比两边的高点高,是一条特定的k线 + + <候选顶分型>: 连续的<临时顶>取最大 + <候选底分型>: 连续的<临时底>取最小 + 任何时刻只能有一个候选,其之前是一个确定的分型 + + <上升k线>: + <下降k线>: + <连接k线>: 分型之间的k线都可以认为是连接k线,以上为演化过程的中间态 + distance(<候选顶分型>, <连接k线>)>=4 则 <候选顶分型> 变成顶分型 + distance(<候选底分型>, <连接k线>)>=4 则 <候选底分型> 变成底分型 + + <顶分型><连接k线><候选底分型> + <底分型><连接k线><候选顶分型> + """ + super().__init__(acc_window) + 
+ def acc_one(self, entity_id, df: pd.DataFrame, acc_df: pd.DataFrame, state: dict) -> (pd.DataFrame, dict): + self.logger.info(f"acc_one:{entity_id}") + if pd_is_not_null(acc_df): + df = df[df.index > acc_df.index[-1]] + if pd_is_not_null(df): + self.logger.info(f'compute from {df.iloc[0]["timestamp"]}') + # 遍历的开始位置 + start_index = len(acc_df) + + acc_df = pd.concat([acc_df, df]) + + zen_state = ZenState(state) + + acc_df = acc_df.reset_index(drop=True) + current_interval = acc_df.iloc[start_index - 1]["current_interval"] + else: + self.logger.info("no need to compute") + return acc_df, state + else: + acc_df = df + # 笔的底 + acc_df["bi_di"] = False + # 笔的顶 + acc_df["bi_ding"] = False + # 记录笔顶/底分型的值,bi_di取low,bi_ding取high,其他为None,绘图时取有值的连线即为 笔 + acc_df["bi_value"] = np.nan + # 笔的变化 + acc_df["bi_change"] = np.nan + # 笔的斜率 + acc_df["bi_slope"] = np.nan + # 持续的周期 + acc_df["bi_interval"] = np.nan + + # 记录临时分型,不变 + acc_df["tmp_ding"] = False + acc_df["tmp_di"] = False + # 分型的力度 + acc_df["fenxing_power"] = np.nan + + # 目前分型确定的方向 + acc_df["current_direction"] = None + acc_df["current_change"] = np.nan + acc_df["current_interval"] = np.nan + acc_df["current_slope"] = np.nan + # 最近的一个笔中枢 + # acc_df['current_zhongshu'] = np.nan + acc_df["current_zhongshu_change"] = np.nan + acc_df["current_zhongshu_y0"] = np.nan + acc_df["current_zhongshu_y1"] = np.nan + + acc_df["current_merge_zhongshu_change"] = np.nan + acc_df["current_merge_zhongshu_y0"] = np.nan + acc_df["current_merge_zhongshu_y1"] = np.nan + acc_df["current_merge_zhongshu_level"] = np.nan + acc_df["current_merge_zhongshu_interval"] = np.nan + + # 目前走势的临时方向 其跟direction的的关系 确定了下一个分型 + acc_df["tmp_direction"] = None + acc_df["opposite_change"] = np.nan + acc_df["opposite_interval"] = np.nan + acc_df["opposite_slope"] = np.nan + + acc_df["duan_state"] = "yi" + + # 段的底 + acc_df["duan_di"] = False + # 段的顶 + acc_df["duan_ding"] = False + # 记录段顶/底的值,为duan_di时取low,为duan_ding时取high,其他为None,绘图时取有值的连线即为 段 + acc_df["duan_value"] = 
np.nan + # 段的变化 + acc_df["duan_change"] = np.nan + # 段的斜率 + acc_df["duan_slope"] = np.nan + # 持续的周期 + acc_df["duan_interval"] = np.nan + + # 记录在确定中枢的最后一个段的终点x1,值为Rect(x0,y0,x1,y1) + acc_df["zhongshu"] = None + acc_df["zhongshu_change"] = np.nan + + acc_df["bi_zhongshu"] = None + acc_df["bi_zhongshu_change"] = np.nan + + acc_df["merge_zhongshu"] = None + acc_df["merge_zhongshu_change"] = np.nan + acc_df["merge_zhongshu_level"] = np.nan + acc_df["merge_zhongshu_interval"] = np.nan + + acc_df = acc_df.reset_index(drop=True) + + zen_state = ZenState( + dict( + fenxing_list=[], + direction=None, + can_fenxing=None, + can_fenxing_index=None, + opposite_count=0, + current_duan_state="yi", + duans=[], + pre_bi=None, + pre_duan=None, + merge_zhongshu=None, + ) + ) + + zen_state.fenxing_list: List[Fenxing] = [] + + # 取前11条k线,至多出现一个顶分型+底分型 + # 注:只是一种方便的确定第一个分型的办法,有了第一个分型,后面的处理就比较统一 + # start_index 为遍历开始的位置 + # direction为一个确定分型后的方向,即顶分型后为:down,底分型后为:up + fenxing, start_index, direction, current_interval = handle_first_fenxing(acc_df, step=11) + if not fenxing: + return None, None + + zen_state.fenxing_list.append(fenxing) + zen_state.direction = direction + + # list of (timestamp,value) + zen_state.duans = [] + zen_state.bis = [] + + pre_kdata = acc_df.iloc[start_index - 1] + pre_index = start_index - 1 + + tmp_direction = zen_state.direction + current_merge_zhongshu = decode_rect(zen_state.merge_zhongshu) if zen_state.merge_zhongshu else None + current_merge_zhongshu_change = None + current_merge_zhongshu_interval = zen_state.merge_zhongshu_interval + current_merge_zhongshu_level = zen_state.merge_zhongshu_level + + current_zhongshu = None + current_zhongshu_change = None + for index, kdata in acc_df.iloc[start_index:].iterrows(): + # print(f'timestamp: {kdata.timestamp}') + # 临时方向 + tmp_direction = get_direction(kdata, pre_kdata, current=tmp_direction) + + # current states + current_interval = current_interval + 1 + if zen_state.direction == Direction.up: + pre_value = 
acc_df.loc[zen_state.fenxing_list[0].index, "low"] + current_value = kdata["high"] + else: + pre_value = acc_df.loc[zen_state.fenxing_list[0].index, "high"] + current_value = kdata["low"] + acc_df.loc[index, "current_direction"] = zen_state.direction.value + acc_df.loc[index, "current_interval"] = current_interval + change = (current_value - pre_value) / abs(pre_value) + acc_df.loc[index, "current_change"] = change + acc_df.loc[index, "current_slope"] = change / current_interval + if current_zhongshu: + # acc_df.loc[index, 'current_zhongshu'] = current_zhongshu + acc_df.loc[index, "current_zhongshu_y0"] = current_zhongshu.y0 + acc_df.loc[index, "current_zhongshu_y1"] = current_zhongshu.y1 + acc_df.loc[index, "current_zhongshu_change"] = current_zhongshu_change + else: + # acc_df.loc[index, 'current_zhongshu'] = acc_df.loc[index - 1, 'current_zhongshu'] + acc_df.loc[index, "current_zhongshu_y0"] = acc_df.loc[index - 1, "current_zhongshu_y0"] + acc_df.loc[index, "current_zhongshu_y1"] = acc_df.loc[index - 1, "current_zhongshu_y1"] + acc_df.loc[index, "current_zhongshu_change"] = acc_df.loc[index - 1, "current_zhongshu_change"] + + if current_merge_zhongshu: + # acc_df.loc[index, 'current_merge_zhongshu'] = current_merge_zhongshu + acc_df.loc[index, "current_merge_zhongshu_y0"] = current_merge_zhongshu.y0 + acc_df.loc[index, "current_merge_zhongshu_y1"] = current_merge_zhongshu.y1 + acc_df.loc[index, "current_merge_zhongshu_change"] = current_merge_zhongshu_change + acc_df.loc[index, "current_merge_zhongshu_level"] = current_merge_zhongshu_level + acc_df.loc[index, "current_merge_zhongshu_interval"] = current_merge_zhongshu_interval + else: + # acc_df.loc[index, 'current_merge_zhongshu'] = acc_df.loc[index - 1, 'current_merge_zhongshu'] + acc_df.loc[index, "current_merge_zhongshu_y0"] = acc_df.loc[index - 1, "current_merge_zhongshu_y0"] + acc_df.loc[index, "current_merge_zhongshu_y1"] = acc_df.loc[index - 1, "current_merge_zhongshu_y1"] + acc_df.loc[index, 
"current_merge_zhongshu_change"] = acc_df.loc[ + index - 1, "current_merge_zhongshu_change" + ] + acc_df.loc[index, "current_merge_zhongshu_level"] = acc_df.loc[ + index - 1, "current_merge_zhongshu_level" + ] + acc_df.loc[index, "current_merge_zhongshu_interval"] = acc_df.loc[ + index - 1, "current_merge_zhongshu_interval" + ] + + # 处理包含关系 + handle_including( + one_df=acc_df, + index=index, + kdata=kdata, + pre_index=pre_index, + pre_kdata=pre_kdata, + tmp_direction=tmp_direction, + ) + + # 根据方向,寻找对应的分型 和 段 + if zen_state.direction == Direction.up: + tmp_fenxing_col = "tmp_ding" + fenxing_col = "bi_ding" + else: + tmp_fenxing_col = "tmp_di" + fenxing_col = "bi_di" + + # 方向一致,延续中 + if tmp_direction == zen_state.direction: + zen_state.opposite_count = 0 + # 反向,寻找反 分型 + else: + zen_state.opposite_count = zen_state.opposite_count + 1 + + # opposite states + current_interval = zen_state.opposite_count + if tmp_direction == Direction.up: + pre_value = acc_df.loc[index - zen_state.opposite_count, "low"] + current_value = kdata["high"] + else: + pre_value = acc_df.loc[index - zen_state.opposite_count, "high"] + current_value = kdata["low"] + acc_df.loc[index, "tmp_direction"] = tmp_direction.value + acc_df.loc[index, "opposite_interval"] = current_interval + change = (current_value - pre_value) / abs(pre_value) + acc_df.loc[index, "opposite_change"] = change + acc_df.loc[index, "opposite_slope"] = change / current_interval + + # 第一次反向 + if zen_state.opposite_count == 1: + acc_df.loc[pre_index, tmp_fenxing_col] = True + acc_df.loc[pre_index, "fenxing_power"] = fenxing_power( + acc_df.loc[pre_index - 1], + pre_kdata, + kdata, + fenxing=tmp_fenxing_col, + ) + + if zen_state.can_fenxing is not None: + # 候选底分型 + if tmp_direction == Direction.up: + # 取小的 + if pre_kdata["low"] <= zen_state.can_fenxing["low"]: + zen_state.can_fenxing = pre_kdata[["low", "high"]] + zen_state.can_fenxing_index = pre_index + + # 候选顶分型 + else: + # 取大的 + if pre_kdata["high"] >= 
zen_state.can_fenxing["high"]: + zen_state.can_fenxing = pre_kdata[["low", "high"]] + zen_state.can_fenxing_index = pre_index + else: + zen_state.can_fenxing = pre_kdata[["low", "high"]] + zen_state.can_fenxing_index = pre_index + + # 分型确立 + if zen_state.can_fenxing is not None: + if zen_state.opposite_count >= 4 or (index - zen_state.can_fenxing_index >= 8): + acc_df.loc[zen_state.can_fenxing_index, fenxing_col] = True + + # 记录笔的值 + if fenxing_col == "bi_ding": + bi_value = acc_df.loc[zen_state.can_fenxing_index, "high"] + else: + bi_value = acc_df.loc[zen_state.can_fenxing_index, "low"] + acc_df.loc[zen_state.can_fenxing_index, "bi_value"] = bi_value + + # 计算笔斜率 + if zen_state.pre_bi: + change = (bi_value - zen_state.pre_bi[1]) / abs(zen_state.pre_bi[1]) + interval = zen_state.can_fenxing_index - zen_state.pre_bi[0] + bi_slope = change / interval + acc_df.loc[zen_state.can_fenxing_index, "bi_change"] = change + acc_df.loc[zen_state.can_fenxing_index, "bi_slope"] = bi_slope + acc_df.loc[zen_state.can_fenxing_index, "bi_interval"] = interval + + # 记录用于计算笔中枢的笔 + zen_state.bis.append( + ( + acc_df.loc[zen_state.can_fenxing_index, "timestamp"], + bi_value, + zen_state.can_fenxing_index, + ) + ) + + # 计算笔中枢,当下来说这个 中枢 是确定的,并且是不可变的 + # 但标记的点为 过去,注意在回测时最近的一个中枢可能用到未来函数,前一个才是 已知的 + # 所以记了一个 current_zhongshu_y0 current_zhongshu_y1 这个是可直接使用的 + end_index = zen_state.can_fenxing_index + + ( + zen_state.bis, + current_zhongshu, + current_zhongshu_change, + current_zhongshu_interval, + ) = handle_zhongshu( + points=zen_state.bis, + acc_df=acc_df, + end_index=end_index, + zhongshu_col="bi_zhongshu", + zhongshu_change_col="bi_zhongshu_change", + ) + + if not current_merge_zhongshu: + current_merge_zhongshu = current_zhongshu + current_merge_zhongshu_change = current_zhongshu_change + current_merge_zhongshu_level = 1 + current_merge_zhongshu_interval = current_zhongshu_interval + else: + if current_zhongshu: + range_a = ( + current_merge_zhongshu.y0, + current_merge_zhongshu.y1, + ) 
+ range_b = (current_zhongshu.y0, current_zhongshu.y1) + combine_range = combine(range_a, range_b) + if combine_range: + y0 = combine_range[0] + y1 = combine_range[1] + current_merge_zhongshu = Rect( + x0=current_merge_zhongshu.x0, + x1=current_zhongshu.x1, + y0=y0, + y1=y1, + ) + current_merge_zhongshu_change = abs(y0 - y1) / abs(y0) + current_merge_zhongshu_level = current_merge_zhongshu_level + 1 + current_merge_zhongshu_interval = ( + current_merge_zhongshu_interval + current_zhongshu_interval + ) + else: + current_merge_zhongshu = current_zhongshu + current_merge_zhongshu_change = current_zhongshu_change + current_merge_zhongshu_level = 1 + current_merge_zhongshu_interval = current_zhongshu_interval + + acc_df.loc[end_index, "merge_zhongshu"] = current_merge_zhongshu + acc_df.loc[end_index, "merge_zhongshu_change"] = current_merge_zhongshu_change + acc_df.loc[end_index, "merge_zhongshu_level"] = current_merge_zhongshu_level + acc_df.loc[end_index, "merge_zhongshu_interval"] = current_merge_zhongshu_interval + + zen_state.merge_zhongshu = current_merge_zhongshu + zen_state.merge_zhongshu_interval = current_merge_zhongshu_interval + zen_state.merge_zhongshu_level = current_merge_zhongshu_level + + zen_state.pre_bi = (zen_state.can_fenxing_index, bi_value) + + zen_state.opposite_count = 0 + zen_state.direction = zen_state.direction.opposite() + zen_state.can_fenxing = None + + # 确定第一个段 + if zen_state.fenxing_list != None: + zen_state.fenxing_list.append( + Fenxing( + state=fenxing_col, + kdata={ + "low": float(acc_df.loc[zen_state.can_fenxing_index]["low"]), + "high": float(acc_df.loc[zen_state.can_fenxing_index]["high"]), + }, + index=zen_state.can_fenxing_index, + ) + ) + + if len(zen_state.fenxing_list) == 4: + duan_state = handle_duan( + fenxing_list=zen_state.fenxing_list, + pre_duan_state=zen_state.current_duan_state, + ) + + change = duan_state != zen_state.current_duan_state + + if change: + zen_state.current_duan_state = duan_state + + # 确定状态 + 
acc_df.loc[ + zen_state.fenxing_list[0].index : zen_state.fenxing_list[-1].index, + "duan_state", + ] = zen_state.current_duan_state + + duan_index = zen_state.fenxing_list[0].index + if zen_state.current_duan_state == "up": + acc_df.loc[duan_index, "duan_di"] = True + duan_value = acc_df.loc[duan_index, "low"] + else: + duan_index = zen_state.fenxing_list[0].index + acc_df.loc[duan_index, "duan_ding"] = True + duan_value = acc_df.loc[duan_index, "high"] + # 记录段的值 + acc_df.loc[duan_index, "duan_value"] = duan_value + + # 计算段斜率 + if zen_state.pre_duan: + change = (duan_value - zen_state.pre_duan[1]) / abs(zen_state.pre_duan[1]) + interval = duan_index - zen_state.pre_duan[0] + duan_slope = change / interval + acc_df.loc[duan_index, "duan_change"] = change + acc_df.loc[duan_index, "duan_slope"] = duan_slope + acc_df.loc[duan_index, "duan_interval"] = interval + + zen_state.pre_duan = (duan_index, duan_value) + + # 记录用于计算中枢的段 + zen_state.duans.append( + ( + acc_df.loc[duan_index, "timestamp"], + duan_value, + duan_index, + ) + ) + + # 计算中枢 + zen_state.duans, _, _, _ = handle_zhongshu( + points=zen_state.duans, + acc_df=acc_df, + end_index=duan_index, + zhongshu_col="zhongshu", + zhongshu_change_col="zhongshu_change", + ) + + # 只留最后一个 + zen_state.fenxing_list = zen_state.fenxing_list[-1:] + else: + # 保持之前的状态并踢出候选 + acc_df.loc[zen_state.fenxing_list[0].index, "duan_state"] = ( + zen_state.current_duan_state + ) + zen_state.fenxing_list = zen_state.fenxing_list[1:] + + pre_kdata = kdata + pre_index = index + + acc_df = acc_df.set_index("timestamp", drop=False) + return acc_df, zen_state + + +class ZenFactor(TechnicalFactor): + accumulator = ZenAccumulator() + + def __init__( + self, + entity_schema: Type[TradableEntity] = Stock, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, 
+ columns: List = None, + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: int = None, + transformer: Transformer = None, + accumulator: Accumulator = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = False, + adjust_type: Union[AdjustType, str] = None, + ) -> None: + self.factor_schema = get_zen_factor_schema(entity_type=entity_schema.__name__, level=level) + super().__init__( + entity_schema, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + transformer, + accumulator, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + adjust_type, + ) + + def factor_col_map_object_hook(self) -> dict: + return { + "zhongshu": decode_rect, + "bi_zhongshu": decode_rect, + "merge_zhongshu": decode_rect, + } + + def state_encoder(self): + return FactorStateEncoder + + def drawer_factor_df_list(self) -> Optional[List[pd.DataFrame]]: + bi_value = self.factor_df[["bi_value"]].dropna() + # duan_value = self.factor_df[['duan_value']].dropna() + return [bi_value] + + def drawer_rects(self) -> List[Rect]: + df1 = self.factor_df[["merge_zhongshu"]].dropna() + return df1["merge_zhongshu"].tolist() + + def drawer_sub_df_list(self) -> Optional[List[pd.DataFrame]]: + # bi_slope = self.factor_df[['bi_slope']].dropna() + # duan_slope = self.factor_df[['duan_slope']].dropna() + # power = self.factor_df[['fenxing_power']].dropna() + # zhongshu_change = 
self.factor_df[['zhongshu_change']].dropna() + # return [bi_slope, duan_slope, power, zhongshu_change] + # change1 = self.factor_df[['current_merge_zhongshu_level']].dropna() + # change2 = self.factor_df[['opposite_change']].dropna() + current_slope = self.factor_df[["current_slope"]].dropna() + return [current_slope] + + +if __name__ == "__main__": + entity_ids = ["index_sh_000001"] + Index1dKdata.record_data(entity_ids=entity_ids) + + f = ZenFactor( + entity_schema=Index, + entity_ids=entity_ids, + need_persist=False, + provider="em", + entity_provider="exchange", + ) + f.draw(show=True) + + +# the __all__ is generated +__all__ = ["FactorStateEncoder", "get_zen_factor_schema", "ZenState", "handle_zhongshu", "ZenAccumulator", "ZenFactor"] diff --git a/zvt/factors/zen/domain/__init__.py b/src/zvt/factors/zen/domain/__init__.py similarity index 71% rename from zvt/factors/zen/domain/__init__.py rename to src/zvt/factors/zen/domain/__init__.py index 2eab91de..97ed0ae6 100644 --- a/zvt/factors/zen/domain/__init__.py +++ b/src/zvt/factors/zen/domain/__init__.py @@ -1,4 +1,5 @@ -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*-# + # the __all__ is generated __all__ = [] @@ -6,17 +7,26 @@ # common code of the package # export interface in __all__ which contains __all__ of its sub modules +# import all from submodule index_1d_zen_factor +from .index_1d_zen_factor import * +from .index_1d_zen_factor import __all__ as _index_1d_zen_factor_all + +__all__ += _index_1d_zen_factor_all + # import all from submodule stock_1wk_zen_factor from .stock_1wk_zen_factor import * from .stock_1wk_zen_factor import __all__ as _stock_1wk_zen_factor_all + __all__ += _stock_1wk_zen_factor_all # import all from submodule common from .common import * from .common import __all__ as _common_all + __all__ += _common_all # import all from submodule stock_1d_zen_factor from .stock_1d_zen_factor import * from .stock_1d_zen_factor import __all__ as _stock_1d_zen_factor_all -__all__ += 
_stock_1d_zen_factor_all \ No newline at end of file + +__all__ += _stock_1d_zen_factor_all diff --git a/src/zvt/factors/zen/domain/common.py b/src/zvt/factors/zen/domain/common.py new file mode 100644 index 00000000..c9ff3048 --- /dev/null +++ b/src/zvt/factors/zen/domain/common.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import Column, Float, String, Boolean, Integer + +from zvt.contract import Mixin + + +class ZenFactorCommon(Mixin): + level = Column(String(length=32)) + # 开盘价 + open = Column(Float) + # 收盘价 + close = Column(Float) + # 最高价 + high = Column(Float) + # 最低价 + low = Column(Float) + # 成交量 + volume = Column(Float) + # 成交金额 + turnover = Column(Float) + + # 笔的底 + bi_di = Column(Boolean) + # 笔的顶 + bi_ding = Column(Boolean) + # 记录笔顶/底分型的值,bi_di取low,bi_ding取high,其他为None,绘图时取有值的连线即为 笔 + bi_value = Column(Float) + # 笔的变化 + bi_change = Column(Float) + # 笔的斜率 + bi_slope = Column(Float) + # 持续的周期 + bi_interval = Column(Integer) + + # 记录临时分型,不变 + tmp_ding = Column(Boolean) + tmp_di = Column(Boolean) + # 分型的力度 + fenxing_power = Column(Float) + + # 目前分型确定的方向 + current_direction = Column(String(length=16)) + current_change = Column(Float) + current_interval = Column(Integer) + current_slope = Column(Float) + # 最近的一个笔中枢 + # current_zhongshu = Column(String(length=512)) + current_zhongshu_y0 = Column(Float) + current_zhongshu_y1 = Column(Float) + current_zhongshu_change = Column(Float) + + current_merge_zhongshu_y0 = Column(Float) + current_merge_zhongshu_y1 = Column(Float) + current_merge_zhongshu_change = Column(Float) + current_merge_zhongshu_level = Column(Integer) + current_merge_zhongshu_interval = Column(Integer) + + # 目前走势的临时方向 其跟direction的的关系 确定了下一个分型 + tmp_direction = Column(String(length=16)) + # 已经确定分型,目前反向才有值 + opposite_change = Column(Float) + opposite_slope = Column(Float) + opposite_interval = Column(Integer) + + duan_state = Column(String(length=32)) + + # 段的底 + duan_di = Column(Boolean) + # 段的顶 + duan_ding = Column(Boolean) + # 
记录段顶/底的值,为duan_di时取low,为duan_ding时取high,其他为None,绘图时取有值的连线即为 段 + duan_value = Column(Float) + # 段的变化 + duan_change = Column(Float) + # 段的斜率 + duan_slope = Column(Float) + # 持续的周期 + duan_interval = Column(Integer) + + # 记录在确定中枢的最后一个段的终点x1,值为Rect(x0,y0,x1,y1) + zhongshu = Column(String(length=512)) + zhongshu_change = Column(Float) + + # 记录在确定中枢的最后一个笔的终点x1,值为Rect(x0,y0,x1,y1) + bi_zhongshu = Column(String(length=512)) + bi_zhongshu_change = Column(Float) + + # 从前往后,合并相邻的有重叠的笔中枢 + merge_zhongshu = Column(String(length=512)) + merge_zhongshu_change = Column(Float) + merge_zhongshu_level = Column(Integer) + merge_zhongshu_interval = Column(Integer) + + +# the __all__ is generated +__all__ = ["ZenFactorCommon"] diff --git a/src/zvt/factors/zen/domain/index_1d_zen_factor.py b/src/zvt/factors/zen/domain/index_1d_zen_factor.py new file mode 100644 index 00000000..86b412c0 --- /dev/null +++ b/src/zvt/factors/zen/domain/index_1d_zen_factor.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +from sqlalchemy.ext.declarative import declarative_base + +from zvt.contract.register import register_schema +from zvt.factors.zen.domain.common import ZenFactorCommon + +Index1dZenFactorBase = declarative_base() + + +class Index1dZenFactor(Index1dZenFactorBase, ZenFactorCommon): + __tablename__ = "index_1d_zen_factor" + + +register_schema(providers=["zvt"], db_name="index_1d_zen_factor", schema_base=Index1dZenFactorBase, entity_type="index") + + +# the __all__ is generated +__all__ = ["Index1dZenFactor"] diff --git a/zvt/factors/zen/domain/stock_1d_zen_factor.py b/src/zvt/factors/zen/domain/stock_1d_zen_factor.py similarity index 62% rename from zvt/factors/zen/domain/stock_1d_zen_factor.py rename to src/zvt/factors/zen/domain/stock_1d_zen_factor.py index 20aff4a3..b9ebcf95 100644 --- a/zvt/factors/zen/domain/stock_1d_zen_factor.py +++ b/src/zvt/factors/zen/domain/stock_1d_zen_factor.py @@ -8,9 +8,11 @@ class Stock1dZenFactor(Stock1dZenFactorBase, ZenFactorCommon): - __tablename__ = 
'stock_1d_zen_factor' + __tablename__ = "stock_1d_zen_factor" + + +register_schema(providers=["zvt"], db_name="stock_1d_zen_factor", schema_base=Stock1dZenFactorBase, entity_type="stock") -register_schema(providers=['zvt'], db_name='stock_1d_zen_factor', schema_base=Stock1dZenFactorBase) # the __all__ is generated -__all__ = ['Stock1dZenFactor'] +__all__ = ["Stock1dZenFactor"] diff --git a/zvt/factors/zen/domain/stock_1wk_zen_factor.py b/src/zvt/factors/zen/domain/stock_1wk_zen_factor.py similarity index 61% rename from zvt/factors/zen/domain/stock_1wk_zen_factor.py rename to src/zvt/factors/zen/domain/stock_1wk_zen_factor.py index 56cd3f85..b6138086 100644 --- a/zvt/factors/zen/domain/stock_1wk_zen_factor.py +++ b/src/zvt/factors/zen/domain/stock_1wk_zen_factor.py @@ -8,9 +8,13 @@ class Stock1wkZenFactor(Stock1wkZenFactorBase, ZenFactorCommon): - __tablename__ = 'stock_1wk_zen_factor' + __tablename__ = "stock_1wk_zen_factor" + + +register_schema( + providers=["zvt"], db_name="stock_1wk_zen_factor", schema_base=Stock1wkZenFactorBase, entity_type="stock" +) -register_schema(providers=['zvt'], db_name='stock_1wk_zen_factor', schema_base=Stock1wkZenFactorBase) # the __all__ is generated -__all__ = ['Stock1wkZenFactor'] +__all__ = ["Stock1wkZenFactor"] diff --git a/src/zvt/factors/zen/zen_factor.py b/src/zvt/factors/zen/zen_factor.py new file mode 100644 index 00000000..c90c9128 --- /dev/null +++ b/src/zvt/factors/zen/zen_factor.py @@ -0,0 +1,461 @@ +# -*- coding: utf-8 -*- +import logging +import math +from enum import Enum +from typing import List, Optional +from typing import Union, Type + +import pandas as pd + +from zvt.contract import IntervalLevel, AdjustType +from zvt.contract import TradableEntity +from zvt.contract.data_type import Bean +from zvt.contract.drawer import Rect +from zvt.contract.factor import Accumulator +from zvt.contract.factor import Transformer +from zvt.domain import Stock +from zvt.factors.algorithm import distance, intersect +from 
zvt.factors.zen.base_factor import ZenFactor +from zvt.utils.pd_utils import ( + group_by_entity_id, + normalize_group_compute_result, + pd_is_not_null, +) + +logger = logging.getLogger(__name__) + + +class ZhongshuRange(Enum): + # <=0.4 + small = "small" + # >0.4 + big = "big" + + @classmethod + def of(cls, change): + if change <= 0.4: + return ZhongshuRange.small + else: + return ZhongshuRange.big + + +class ZhongshuLevel(Enum): + # level <= 3 + level1 = "level1" + # 3 < level <=7 + level2 = "level2" + # level > 7 + level3 = "level3" + + @classmethod + def of(cls, level): + if level <= 3: + return ZhongshuLevel.level1 + elif level <= 7: + return ZhongshuLevel.level2 + else: + return ZhongshuLevel.level3 + + +class ZhongshuDistance(Enum): + big_up = "big_up" + big_down = "big_down" + small_up = "small_up" + small_down = "small_down" + + @classmethod + def of(cls, d): + if d is None or math.isnan(d) or d == 0: + zhongshu_distance = None + elif d <= -0.5: + zhongshu_distance = ZhongshuDistance.big_down + elif d < 0: + zhongshu_distance = ZhongshuDistance.small_down + elif d <= 0.5: + zhongshu_distance = ZhongshuDistance.small_up + else: + zhongshu_distance = ZhongshuDistance.big_up + return zhongshu_distance + + +class Zhongshu(object): + def __str__(self) -> str: + if self.zhongshu_distance: + d = self.zhongshu_distance.value + else: + d = None + return f"{self.zhongshu_range.value},{self.zhongshu_level.value},{d}" + + def __eq__(self, o: object) -> bool: + if isinstance(o, self.__class__): + return ( + self.zhongshu_range == o.zhongshu_range + and self.zhongshu_level == o.zhongshu_level + and self.zhongshu_distance == o.zhongshu_distance + ) + return False + + def __init__( + self, + zhongshu_range: ZhongshuRange, + zhongshu_level: ZhongshuLevel, + zhongshu_distance: ZhongshuDistance, + ) -> None: + self.zhongshu_range = zhongshu_range + self.zhongshu_level = zhongshu_level + self.zhongshu_distance = zhongshu_distance + + +def category_zen_state(): + all_states = 
[] + + for zhongshu_range in ZhongshuRange: + for zhongshu_level in ZhongshuLevel: + for distance in ZhongshuDistance: + pass + + +class ZenState(Bean): + def __eq__(self, o: object) -> bool: + if isinstance(o, self.__class__): + return self.zhongshu_list == o.zhongshu_list + + def __str__(self) -> str: + return ",".join([f"{elem}" for elem in self.zhongshu_list]) + + def __init__(self, zhongshu_state_list: List) -> None: + self.zhongshu_list: List[Zhongshu] = [] + self.zhongshu_state_list = zhongshu_state_list + + pre_range = None + for zhongshu_state in zhongshu_state_list: + current_range = (zhongshu_state[0], zhongshu_state[1]) + d = None + if pre_range is None: + pre_range = current_range + else: + d = distance(pre_range, current_range) + pre_range = current_range + change = zhongshu_state[2] + level = zhongshu_state[3] + + zhongshu_range = ZhongshuRange.of(change=change) + zhongshu_level = ZhongshuLevel.of(level=level) + zhongshu_distance = ZhongshuDistance.of(d=d) + + zhongshu = Zhongshu( + zhongshu_range=zhongshu_range, + zhongshu_level=zhongshu_level, + zhongshu_distance=zhongshu_distance, + ) + + self.zhongshu_list.append(zhongshu) + + +def cal_distance(s): + d_list = [] + current_range = None + print(s) + for idx, row in s.items(): + d = None + if row is not None: + if current_range is None: + current_range = row + else: + d = distance((current_range.y0, current_range.y1), (row.y0, row.y1)) + current_range = row + d_list.append(d) + return pd.Series(index=s.index, data=d_list) + + +def cal_zen_state(s): + zen_states = [] + zhongshu_state_list = [] + current_zhongshu_state = None + for idx, row in s.items(): + # row + # 0 current_merge_zhongshu_y0 + # 1 current_merge_zhongshu_y1 + # 2 current_merge_zhongshu_change + # 3 current_merge_zhongshu_level + # 4 current_merge_zhongshu_interval + if row[0] is not None and not math.isnan(row[0]): + if current_zhongshu_state != row: + # 相同的中枢,保留最近的(包含关系时产生) + if current_zhongshu_state != None and intersect( + 
(current_zhongshu_state[0], current_zhongshu_state[1]), + (row[0], row[1]), + ): + zhongshu_state_list = zhongshu_state_list[:-1] + + # 最多保留最近5个 + zhongshu_state_list = zhongshu_state_list[-4:] + [row] + current_zhongshu_state = row + + if len(zhongshu_state_list) == 5: + zen_states.append(ZenState(zhongshu_state_list)) + else: + zen_states.append(None) + return pd.Series(index=s.index, data=zen_states) + + +def good_state(zen_state: ZenState): + if zen_state: + zhongshu0 = zen_state.zhongshu_list[0] + zhongshu1 = zen_state.zhongshu_list[1] + zhongshu2 = zen_state.zhongshu_list[2] + zhongshu3 = zen_state.zhongshu_list[3] + zhongshu4 = zen_state.zhongshu_list[4] + + # 没大涨过 + if ZhongshuDistance.big_up not in ( + zhongshu1.zhongshu_distance, + zhongshu2.zhongshu_distance, + zhongshu3.zhongshu_distance, + zhongshu4.zhongshu_distance, + ): + if ZhongshuRange.big not in ( + zhongshu3.zhongshu_range, + zhongshu4.zhongshu_range, + ): + # 最近一个窄幅震荡 + if ZhongshuRange.small == zhongshu4.zhongshu_range and ZhongshuLevel.level1 != zhongshu4.zhongshu_level: + return True + + return False + + +def trending_state(zen_state: ZenState): + if zen_state: + zhongshu0 = zen_state.zhongshu_list[0] + zhongshu1 = zen_state.zhongshu_list[1] + zhongshu2 = zen_state.zhongshu_list[2] + zhongshu3 = zen_state.zhongshu_list[3] + zhongshu4 = zen_state.zhongshu_list[4] + + # 没大涨过 + if ZhongshuDistance.big_up not in ( + zhongshu1.zhongshu_distance, + zhongshu2.zhongshu_distance, + zhongshu3.zhongshu_distance, + ): + if ZhongshuRange.big not in ( + zhongshu3.zhongshu_range, + zhongshu4.zhongshu_range, + ): + # 最近一个窄幅震荡 + if ZhongshuRange.small == zhongshu4.zhongshu_range and ZhongshuLevel.level1 == zhongshu4.zhongshu_level: + return True + + return False + + +class TrendingFactor(ZenFactor): + def __init__( + self, + entity_schema: Type[TradableEntity] = Stock, + provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = 
None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + columns: List = None, + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: int = None, + transformer: Transformer = None, + accumulator: Accumulator = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = True, + adjust_type: Union[AdjustType, str] = None, + ) -> None: + super().__init__( + entity_schema, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + transformer, + accumulator, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + adjust_type, + ) + + def compute_result(self): + super().compute_result() + if pd_is_not_null(self.factor_df): + df = self.factor_df.apply( + lambda x: ( + x["current_merge_zhongshu_y0"], + x["current_merge_zhongshu_y1"], + x["current_merge_zhongshu_change"], + x["current_merge_zhongshu_level"], + x["current_merge_zhongshu_interval"], + ), + axis=1, + ) + + state_df = group_by_entity_id(df).apply(cal_zen_state) + print(self.factor_df) + print(state_df) + self.factor_df["zen_state"] = normalize_group_compute_result(state_df) + self.factor_df["good_state"] = self.factor_df["zen_state"].apply(good_state) + + s = self.factor_df["good_state"] + self.result_df = s.to_frame(name="filter_result") + + +class ShakingFactor(ZenFactor): + # 震荡区间 + shaking_range = 0.5 + + def __init__( + self, + entity_schema: Type[TradableEntity] = Stock, + 
provider: str = None, + entity_provider: str = None, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + columns: List = None, + filters: List = None, + order: object = None, + limit: int = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + category_field: str = "entity_id", + time_field: str = "timestamp", + keep_window: int = None, + keep_all_timestamp: bool = False, + fill_method: str = "ffill", + effective_number: int = None, + transformer: Transformer = None, + accumulator: Accumulator = None, + need_persist: bool = False, + only_compute_factor: bool = False, + factor_name: str = None, + clear_state: bool = False, + only_load_factor: bool = True, + adjust_type: Union[AdjustType, str] = None, + ) -> None: + super().__init__( + entity_schema, + provider, + entity_provider, + entity_ids, + exchanges, + codes, + start_timestamp, + end_timestamp, + columns, + filters, + order, + limit, + level, + category_field, + time_field, + keep_window, + keep_all_timestamp, + fill_method, + effective_number, + transformer, + accumulator, + need_persist, + only_compute_factor, + factor_name, + clear_state, + only_load_factor, + adjust_type, + ) + + def drawer_sub_df_list(self) -> Optional[List[pd.DataFrame]]: + df1 = self.factor_df[["current_merge_zhongshu_y1"]].dropna() + df2 = self.factor_df[["current_merge_zhongshu_y0"]].dropna() + return [df1, df2] + + def drawer_rects(self) -> List[Rect]: + return super().drawer_rects() + + def compute_result(self): + super().compute_result() + # 窄幅震荡 + s1 = self.factor_df["current_merge_zhongshu_change"] <= self.shaking_range + # 中枢级别 + s2 = self.factor_df["current_merge_zhongshu_level"] >= 2 + s3 = self.factor_df["current_merge_zhongshu_interval"] >= 120 + + # 中枢上缘 + s4 = self.factor_df["close"] <= 1.1 * self.factor_df["current_merge_zhongshu_y1"] + s5 = self.factor_df["close"] 
>= 0.9 * self.factor_df["current_merge_zhongshu_y1"] + + # 中枢下缘 + s6 = self.factor_df["close"] <= 1.1 * self.factor_df["current_merge_zhongshu_y0"] + s7 = self.factor_df["close"] >= 0.9 * self.factor_df["current_merge_zhongshu_y0"] + + s = s1 & s2 & s3 & ((s4 & s5) | (s6 & s7)) + # s = s.groupby(level=0).apply(drop_continue_duplicate) + if s.index.nlevels == 3: + s = s.reset_index(level=0, drop=True) + + self.result_df = s.to_frame(name="filter_result") + print(self.result_df) + + +if __name__ == "__main__": + entity_ids = ["stock_sz_000338"] + + f = ZenFactor( + provider="em", + entity_schema=Stock, + entity_ids=entity_ids, + need_persist=True, + ) + f.draw(show=True) + + +# the __all__ is generated +__all__ = [ + "ZhongshuRange", + "ZhongshuLevel", + "ZhongshuDistance", + "Zhongshu", + "category_zen_state", + "ZenState", + "cal_distance", + "cal_zen_state", + "good_state", + "trending_state", + "TrendingFactor", + "ShakingFactor", +] diff --git a/src/zvt/fill_project.py b/src/zvt/fill_project.py new file mode 100644 index 00000000..1cd4acf9 --- /dev/null +++ b/src/zvt/fill_project.py @@ -0,0 +1,107 @@ +# script to auto generate some files +from zvt.autocode.generator import gen_kdata_schema, gen_exports +from zvt.contract import AdjustType +from zvt.contract import IntervalLevel + + +def gen_kdata_schemas(): + """ + generate kdata(OHLC) schemas for tradable entity + + """ + # A股行情 + gen_kdata_schema( + pkg="zvt", + providers=["em", "qmt", "joinquant"], + entity_type="stock", + levels=[ + level for level in IntervalLevel if level not in (IntervalLevel.LEVEL_L2_QUOTE, IntervalLevel.LEVEL_TICK) + ], + adjust_types=[None, AdjustType.hfq], + entity_in_submodule=True, + ) + # 中国期货 + gen_kdata_schema( + pkg="zvt", + providers=["em"], + entity_type="future", + levels=[IntervalLevel.LEVEL_1DAY], + entity_in_submodule=True, + ) + + # 美股 + gen_kdata_schema( + pkg="zvt", + providers=["em"], + entity_type="stockus", + levels=[IntervalLevel.LEVEL_1DAY], + adjust_types=[None, 
AdjustType.hfq], + entity_in_submodule=True, + ) + # 美指 + gen_kdata_schema( + pkg="zvt", + providers=["em"], + entity_type="indexus", + levels=[IntervalLevel.LEVEL_1DAY], + entity_in_submodule=True, + ) + + # 港股 + gen_kdata_schema( + pkg="zvt", + providers=["em"], + entity_type="stockhk", + levels=[IntervalLevel.LEVEL_1DAY], + adjust_types=[None, AdjustType.hfq], + entity_in_submodule=True, + ) + + # 板块行情 + gen_kdata_schema( + pkg="zvt", + providers=["em"], + entity_type="block", + levels=[IntervalLevel.LEVEL_1DAY, IntervalLevel.LEVEL_1WEEK, IntervalLevel.LEVEL_1MON], + entity_in_submodule=True, + ) + + # A股指数行情 + gen_kdata_schema( + pkg="zvt", + providers=["em", "sina"], + entity_type="index", + levels=[IntervalLevel.LEVEL_1DAY, IntervalLevel.LEVEL_1WEEK], + entity_in_submodule=True, + ) + + # etf行情 + gen_kdata_schema( + pkg="zvt", providers=["sina"], entity_type="etf", levels=[IntervalLevel.LEVEL_1DAY], entity_in_submodule=True + ) + + # currency行情 + gen_kdata_schema( + pkg="zvt", providers=["em"], entity_type="currency", levels=[IntervalLevel.LEVEL_1DAY], entity_in_submodule=True + ) + + +if __name__ == "__main__": + # gen_exports("api") + # gen_exports("broker") + # gen_exports("common") + # gen_exports("contract", export_from_package=True, export_modules=["schema"]) + # gen_exports("domain", export_from_package=True) + gen_exports("factors", export_from_package=True) + # gen_exports("trading") + + # gen_exports("ml") + # gen_exports("utils", export_from_package=False, export_var=True) + # gen_exports('informer') + # gen_exports('trader') + # gen_exports('autocode') + # gen_exports("zhdate") + # gen_exports("recorders", export_from_package=True, exclude_modules=["qmt"]) + # gen_exports("tag", export_from_package=False) + # gen_kdata_schemas() + # zip_dir(ZVT_TEST_DATA_PATH, zip_file_name=DATA_SAMPLE_ZIP_PATH) diff --git a/zvt/informer/__init__.py b/src/zvt/informer/__init__.py similarity index 92% rename from zvt/informer/__init__.py rename to 
src/zvt/informer/__init__.py index 067421b5..20ab0129 100644 --- a/zvt/informer/__init__.py +++ b/src/zvt/informer/__init__.py @@ -9,4 +9,5 @@ # import all from submodule informer from .informer import * from .informer import __all__ as _informer_all -__all__ += _informer_all \ No newline at end of file + +__all__ += _informer_all diff --git a/src/zvt/informer/inform_utils.py b/src/zvt/informer/inform_utils.py new file mode 100644 index 00000000..4be0f3c8 --- /dev/null +++ b/src/zvt/informer/inform_utils.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +import eastmoneypy +import requests + +from zvt import zvt_config +from zvt.contract.api import get_entities +from zvt.informer import EmailInformer + + +def inform_email(entity_ids, entity_type, target_date, title, provider): + msg = "no targets" + if entity_ids: + entities = get_entities(provider=provider, entity_type=entity_type, entity_ids=entity_ids, return_type="domain") + assert len(entities) == len(entity_ids) + + infos = [f"{entity.name}({entity.code})" for entity in entities] + msg = "\n".join(infos) + "\n" + + EmailInformer().send_message(zvt_config["email_username"], f"{target_date} {title}", msg) + + +def add_to_eastmoney(codes, group, entity_type="stock", over_write=True, headers_list=None): + if headers_list is None: + headers_list = [None] + + for headers in headers_list: + with requests.Session() as session: + group_id = eastmoneypy.get_group_id(group, session=session, headers=headers) + + need_create_group = False + + if not group_id: + need_create_group = True + + if group_id and over_write: + eastmoneypy.del_group(group_name=group, session=session, headers=headers) + need_create_group = True + + codes = set(codes) + if need_create_group: + result = eastmoneypy.create_group(group_name=group, session=session, headers=headers) + group_id = result["gid"] + else: + current_codes = eastmoneypy.list_entities(group_id=group_id, session=session, headers=headers) + if current_codes: + codes = codes - 
set(current_codes) + + for code in codes: + eastmoneypy.add_to_group( + code=code, entity_type=entity_type, group_id=group_id, session=session, headers=headers + ) + + +def clean_eastmoney_groups(keep, headers_list=None): + if headers_list is None: + headers_list = [None] + + for headers in headers_list: + if keep is None: + keep = ["自选股"] + with requests.Session() as session: + groups = eastmoneypy.get_groups(session=session, headers=headers) + groups_to_clean = [group["gid"] for group in groups if group["gname"] not in keep] + for gid in groups_to_clean: + eastmoneypy.del_group(group_id=gid, session=session, headers=headers) + + +def delete_eastmoney_group(group_name, headers_list=None): + if headers_list is None: + headers_list = [None] + for headers in headers_list: + with requests.Session() as session: + eastmoneypy.del_group(group_name=group_name, session=session, headers=headers) + + +# the __all__ is generated +__all__ = ["inform_email", "add_to_eastmoney", "clean_eastmoney_groups", "delete_eastmoney_group"] diff --git a/zvt/informer/informer.py b/src/zvt/informer/informer.py similarity index 51% rename from zvt/informer/informer.py rename to src/zvt/informer/informer.py index 9ba74ed5..9076e4af 100644 --- a/zvt/informer/informer.py +++ b/src/zvt/informer/informer.py @@ -11,10 +11,10 @@ from zvt import zvt_config +logger = logging.getLogger(__name__) -class Informer(object): - logger = logging.getLogger(__name__) +class Informer(object): def send_message(self, to_user, title, body, **kwargs): pass @@ -25,38 +25,50 @@ def __init__(self, ssl=True) -> None: self.ssl = ssl def send_message_(self, to_user, title, body, **kwargs): - host = zvt_config['smtp_host'] - port = zvt_config['smtp_port'] - if self.ssl: - try: - smtp_client = smtplib.SMTP_SSL(host=host, port=port) - except: - smtp_client = smtplib.SMTP_SSL() - else: - try: - smtp_client = smtplib.SMTP(host=host, port=port) - except: - smtp_client = smtplib.SMTP() - - smtp_client.connect(host=host, 
port=port) - smtp_client.login(zvt_config['email_username'], zvt_config['email_password']) - msg = MIMEMultipart('alternative') - msg['Subject'] = Header(title).encode() - msg['From'] = "{} <{}>".format(Header('zvt').encode(), zvt_config['email_username']) - if type(to_user) is list: - msg['To'] = ", ".join(to_user) - else: - msg['To'] = to_user - msg['Message-id'] = email.utils.make_msgid() - msg['Date'] = email.utils.formatdate() - - plain_text = MIMEText(body, _subtype='plain', _charset='UTF-8') - msg.attach(plain_text) - + if ( + not zvt_config["smtp_host"] + or not zvt_config["smtp_port"] + or not zvt_config["email_username"] + or not zvt_config["email_password"] + ): + logger.warning(f"Please set smtp_host/smtp_port/email_username/email_password in ~/zvt-home/config.json") + return + host = zvt_config["smtp_host"] + port = zvt_config["smtp_port"] + + smtp_client = None try: - smtp_client.sendmail(zvt_config['email_username'], to_user, msg.as_string()) + if self.ssl: + try: + smtp_client = smtplib.SMTP_SSL(host=host, port=port) + except: + smtp_client = smtplib.SMTP_SSL() + else: + try: + smtp_client = smtplib.SMTP(host=host, port=port) + except: + smtp_client = smtplib.SMTP() + + smtp_client.connect(host=host, port=port) + smtp_client.login(zvt_config["email_username"], zvt_config["email_password"]) + msg = MIMEMultipart("alternative") + msg["Subject"] = Header(title).encode() + msg["From"] = "{} <{}>".format(Header("zvt").encode(), zvt_config["email_username"]) + if type(to_user) is list: + msg["To"] = ", ".join(to_user) + else: + msg["To"] = to_user + msg["Message-id"] = email.utils.make_msgid() + msg["Date"] = email.utils.formatdate() + + plain_text = MIMEText(body, _subtype="plain", _charset="UTF-8") + msg.attach(plain_text) + smtp_client.sendmail(zvt_config["email_username"], to_user, msg.as_string()) except Exception as e: - self.logger.exception('send email failed', e) + logger.exception("send email failed", e) + finally: + if smtp_client: + 
smtp_client.quit() def send_message(self, to_user, title, body, sub_size=20, with_sender=True, **kwargs): if type(to_user) is list and sub_size: @@ -69,9 +81,9 @@ def send_message(self, to_user, title, body, sub_size=20, with_sender=True, **kw step_size = 1 for step in range(step_size): - sub_to_user = to_user[sub_size * step:sub_size * (step + 1)] + sub_to_user = to_user[sub_size * step : sub_size * (step + 1)] if with_sender: - sub_to_user.append(zvt_config['email_username']) + sub_to_user.append(zvt_config["email_username"]) self.send_message_(sub_to_user, title, body, **kwargs) else: self.send_message_(to_user, title, body, **kwargs) @@ -79,7 +91,8 @@ def send_message(self, to_user, title, body, sub_size=20, with_sender=True, **kw class WechatInformer(Informer): GET_TOKEN_URL = "https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid={}&secret={}".format( - zvt_config['wechat_app_id'], zvt_config['wechat_app_secrect']) + zvt_config["wechat_app_id"], zvt_config["wechat_app_secrect"] + ) GET_TEMPLATE_URL = "https://api.weixin.qq.com/cgi-bin/template/get_all_private_template?access_token={}" SEND_MSG_URL = "https://api.weixin.qq.com/cgi-bin/message/template/send?access_token={}" @@ -91,29 +104,29 @@ def __init__(self) -> None: def refresh_token(self): resp = requests.get(self.GET_TOKEN_URL) - self.logger.info("refresh_token resp.status_code:{}, resp.text:{}".format(resp.status_code, resp.text)) + logger.info("refresh_token resp.status_code:{}, resp.text:{}".format(resp.status_code, resp.text)) - if resp.status_code == 200 and resp.json() and 'access_token' in resp.json(): - self.token = resp.json()['access_token'] + if resp.status_code == 200 and resp.json() and "access_token" in resp.json(): + self.token = resp.json()["access_token"] else: - self.logger.exception("could not refresh_token") + logger.exception("could not refresh_token") def send_price_notification(self, to_user, security_name, current_price, change_pct): the_json = 
self._format_price_notification(to_user, security_name, current_price, change_pct) - the_data = json.dumps(the_json, ensure_ascii=False).encode('utf-8') + the_data = json.dumps(the_json, ensure_ascii=False).encode("utf-8") resp = requests.post(self.SEND_MSG_URL.format(self.token), the_data) - self.logger.info("send_price_notification resp:{}".format(resp.text)) + logger.info("send_price_notification resp:{}".format(resp.text)) if resp.json() and resp.json()["errcode"] == 0: - self.logger.info("send_price_notification to user:{} data:{} success".format(to_user, the_json)) + logger.info("send_price_notification to user:{} data:{} success".format(to_user, the_json)) def _format_price_notification(self, to_user, security_name, current_price, change_pct): if change_pct > 0: - title = '吃肉喝汤' + title = "吃肉喝汤" else: - title = '关灯吃面' + title = "关灯吃面" # 先固定一个template @@ -126,44 +139,29 @@ def _format_price_notification(self, to_user, security_name, current_price, chan # "example": "您好,腾新控股最新价130.50元,上涨达到设置的3.2%\r\n股票名:腾讯控股(00700)\r\n最新价:130.50元\r\n涨跌幅:+3.2%\r\n点击查看最新实时行情。" # } - template_id = 'mkqi-L1h56mH637vLXiuS_ulLTs1byDYYgLBbSXQ65U' + template_id = "mkqi-L1h56mH637vLXiuS_ulLTs1byDYYgLBbSXQ65U" the_json = { "touser": to_user, "template_id": template_id, "url": "http://www.foolcage.com", "data": { - "first": { - "value": title, - "color": "#173177" - }, - "keyword1": { - "value": security_name, - "color": "#173177" - }, - "keyword2": { - "value": current_price, - "color": "#173177" - }, - "keyword3": { - "value": '{:.2%}'.format(change_pct), - "color": "#173177" - }, - "remark": { - "value": "会所嫩模 Or 下海干活?", - "color": "#173177" - } - } + "first": {"value": title, "color": "#173177"}, + "keyword1": {"value": security_name, "color": "#173177"}, + "keyword2": {"value": current_price, "color": "#173177"}, + "keyword3": {"value": "{:.2%}".format(change_pct), "color": "#173177"}, + "remark": {"value": "会所嫩模 Or 下海干活?", "color": "#173177"}, + }, } return the_json -if __name__ == 
'__main__': +if __name__ == "__main__": email_action = EmailInformer() - email_action.send_message(["5533061@qq.com", '2315983623@qq.com'], 'helo', 'just a test', sub_size=20) + email_action.send_message(["5533061@qq.com", "2315983623@qq.com"], "helo", "just a test", sub_size=20) # weixin_action = WechatInformer() # weixin_action.send_price_notification(to_user='oRvNP0XIb9G3g6a-2fAX9RHX5--Q', security_name='BTC/USDT', # current_price=1000, change_pct='0.5%') # the __all__ is generated -__all__ = ['Informer', 'EmailInformer', 'WechatInformer'] \ No newline at end of file +__all__ = ["Informer", "EmailInformer", "WechatInformer"] diff --git a/zvt/main.py b/src/zvt/main.py similarity index 52% rename from zvt/main.py rename to src/zvt/main.py index c2e7f010..d2d092e2 100644 --- a/zvt/main.py +++ b/src/zvt/main.py @@ -1,8 +1,7 @@ import dash_bootstrap_components as dbc -import dash_html_components as html +from dash import html from dash.dependencies import Input, Output -from zvt import init_plugins from zvt.ui import zvt_app from zvt.ui.apps import factor_app @@ -11,17 +10,11 @@ def serve_layout(): layout = html.Div( children=[ # banner - html.Div( - className="zvt-banner", - children=html.H2(className="h2-title", children="ZVT") - ), + html.Div(className="zvt-banner", children=html.H2(className="h2-title", children="ZVT")), dbc.CardHeader( dbc.Tabs( - [ - dbc.Tab(label="factor", tab_id="tab-factor", label_style={}, tab_style={"width": "100px"}) - ], + [dbc.Tab(label="factor", tab_id="tab-factor", label_style={}, tab_style={"width": "100px"})], id="card-tabs", - card=True, active_tab="tab-factor", ) ), @@ -32,11 +25,9 @@ def serve_layout(): return layout -@zvt_app.callback( - Output("card-content", "children"), [Input("card-tabs", "active_tab")] -) +@zvt_app.callback(Output("card-content", "children"), [Input("card-tabs", "active_tab")]) def tab_content(active_tab): - if 'tab-factor' == active_tab: + if "tab-factor" == active_tab: return factor_app.factor_layout() @@ 
-44,10 +35,10 @@ def tab_content(active_tab): def main(): - init_plugins() - zvt_app.run_server(debug=True) + # init_plugins() + zvt_app.run_server(debug=True, host="0.0.0.0") # zvt_app.run_server() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/src/zvt/misc/__init__.py b/src/zvt/misc/__init__.py new file mode 100644 index 00000000..2686fff0 --- /dev/null +++ b/src/zvt/misc/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] diff --git a/src/zvt/misc/constants.py b/src/zvt/misc/constants.py new file mode 100644 index 00000000..7fc21639 --- /dev/null +++ b/src/zvt/misc/constants.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +CHINESEYEARCODE = [ + 19416, + 19168, + 42352, + 21717, + 53856, + 55632, + 91476, + 22176, + 39632, + 21970, + 19168, + 42422, + 42192, + 53840, + 119381, + 46400, + 54944, + 44450, + 38320, + 84343, + 18800, + 42160, + 46261, + 27216, + 27968, + 109396, + 11104, + 38256, + 21234, + 18800, + 25958, + 54432, + 59984, + 92821, + 23248, + 11104, + 100067, + 37600, + 116951, + 51536, + 54432, + 120998, + 46416, + 22176, + 107956, + 9680, + 37584, + 53938, + 43344, + 46423, + 27808, + 46416, + 86869, + 19872, + 42416, + 83315, + 21168, + 43432, + 59728, + 27296, + 44710, + 43856, + 19296, + 43748, + 42352, + 21088, + 62051, + 55632, + 23383, + 22176, + 38608, + 19925, + 19152, + 42192, + 54484, + 53840, + 54616, + 46400, + 46752, + 103846, + 38320, + 18864, + 43380, + 42160, + 45690, + 27216, + 27968, + 44870, + 43872, + 38256, + 19189, + 18800, + 25776, + 29859, + 59984, + 27480, + 23232, + 43872, + 38613, + 37600, + 51552, + 55636, + 54432, + 55888, + 30034, + 22176, + 43959, + 9680, + 37584, + 51893, + 43344, + 46240, + 47780, + 44368, + 21977, + 19360, + 42416, + 86390, + 21168, + 43312, + 31060, + 27296, + 44368, + 23378, + 19296, + 42726, + 42208, + 53856, + 60005, + 54576, + 23200, + 30371, + 38608, + 19195, + 19152, + 42192, + 118966, + 53840, + 54560, + 56645, + 
46496, + 22224, + 21938, + 18864, + 42359, + 42160, + 43600, + 111189, + 27936, + 44448, + 84835, + 37744, + 18936, + 18800, + 25776, + 92326, + 59984, + 27296, + 108228, + 43744, + 37600, + 53987, + 51552, + 54615, + 54432, + 55888, + 23893, + 22176, + 42704, + 21972, + 21200, + 43448, + 43344, + 46240, + 46758, + 44368, + 21920, + 43940, + 42416, + 21168, + 45683, + 26928, + 29495, + 27296, + 44368, + 84821, + 19296, + 42352, + 21732, + 53600, + 59752, + 54560, + 55968, + 92838, + 22224, + 19168, + 43476, + 41680, + 53584, + 62034, + 54560, +] +""" +从1900年到2100年的农历月份数据代码 20位二进制代码表示一个年份的数据。 + +前四位0:表示闰月为29天,1:表示闰月为30天 +中间12位:从左起表示1-12月每月的大小,1为30天,0为29天 +最后四位:表示闰月的月份,0表示当年无闰月 + +前四位和最后四位应该结合使用,如果最后四位为0,则不考虑前四位 +例: +1901年代码为 19168,转成二进制为 0b100101011100000, 最后四位为0,当年无闰月,月份数据为 010010101110 分别代表12月的大小情况 +1903年代码为 21717,转成二进制为 0b101010011010101,最后四位为5,当年为闰五月,首四位为0,闰月为29天,月份数据为 010101001101 分别代表12月的大小情况 + +""" + +CHINESENEWYEAR = [ + "19000131", + "19010219", + "19020208", + "19030129", + "19040216", + "19050204", + "19060125", + "19070213", + "19080202", + "19090122", + "19100210", + "19110130", + "19120218", + "19130206", + "19140126", + "19150214", + "19160203", + "19170123", + "19180211", + "19190201", + "19200220", + "19210208", + "19220128", + "19230216", + "19240205", + "19250124", + "19260213", + "19270202", + "19280123", + "19290210", + "19300130", + "19310217", + "19320206", + "19330126", + "19340214", + "19350204", + "19360124", + "19370211", + "19380131", + "19390219", + "19400208", + "19410127", + "19420215", + "19430205", + "19440125", + "19450213", + "19460202", + "19470122", + "19480210", + "19490129", + "19500217", + "19510206", + "19520127", + "19530214", + "19540203", + "19550124", + "19560212", + "19570131", + "19580218", + "19590208", + "19600128", + "19610215", + "19620205", + "19630125", + "19640213", + "19650202", + "19660121", + "19670209", + "19680130", + "19690217", + "19700206", + "19710127", + "19720215", + "19730203", + "19740123", + 
"19750211", + "19760131", + "19770218", + "19780207", + "19790128", + "19800216", + "19810205", + "19820125", + "19830213", + "19840202", + "19850220", + "19860209", + "19870129", + "19880217", + "19890206", + "19900127", + "19910215", + "19920204", + "19930123", + "19940210", + "19950131", + "19960219", + "19970207", + "19980128", + "19990216", + "20000205", + "20010124", + "20020212", + "20030201", + "20040122", + "20050209", + "20060129", + "20070218", + "20080207", + "20090126", + "20100214", + "20110203", + "20120123", + "20130210", + "20140131", + "20150219", + "20160208", + "20170128", + "20180216", + "20190205", + "20200125", + "20210212", + "20220201", + "20230122", + "20240210", + "20250129", + "20260217", + "20270206", + "20280126", + "20290213", + "20300203", + "20310123", + "20320211", + "20330131", + "20340219", + "20350208", + "20360128", + "20370215", + "20380204", + "20390124", + "20400212", + "20410201", + "20420122", + "20430210", + "20440130", + "20450217", + "20460206", + "20470126", + "20480214", + "20490202", + "20500123", + "20510211", + "20520201", + "20530219", + "20540208", + "20550128", + "20560215", + "20570204", + "20580124", + "20590212", + "20600202", + "20610121", + "20620209", + "20630129", + "20640217", + "20650205", + "20660126", + "20670214", + "20680203", + "20690123", + "20700211", + "20710131", + "20720219", + "20730207", + "20740127", + "20750215", + "20760205", + "20770124", + "20780212", + "20790202", + "20800122", + "20810209", + "20820129", + "20830217", + "20840206", + "20850126", + "20860214", + "20870203", + "20880124", + "20890210", + "20900130", + "20910218", + "20920207", + "20930127", + "20940215", + "20950205", + "20960125", + "20970212", + "20980201", + "20990121", + "21000209", +] +""" +从1900年,至2100年每年的农历春节的公历日期 +""" + + +# the __all__ is generated +__all__ = [] diff --git a/src/zvt/misc/misc_models.py b/src/zvt/misc/misc_models.py new file mode 100644 index 00000000..33f3de50 --- /dev/null +++ 
b/src/zvt/misc/misc_models.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +from datetime import datetime + +from zvt.contract.model import CustomModel + + +class TimeMessage(CustomModel): + # 时间 + timestamp: datetime + # 信息 + message: str + + +# the __all__ is generated +__all__ = ["TimeMessage"] diff --git a/src/zvt/misc/misc_service.py b/src/zvt/misc/misc_service.py new file mode 100644 index 00000000..de016644 --- /dev/null +++ b/src/zvt/misc/misc_service.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +from zvt.misc.zhdate import ZhDate +from zvt.utils.time_utils import to_pd_timestamp, current_date, count_interval + + +def holiday_distance(timestamp=None, consider_future_days=15): + if not timestamp: + the_date = current_date() + else: + the_date = to_pd_timestamp(timestamp) + + # 业绩预告 + month = the_date.month + + infos = [f"今天是{the_date.date()}"] + if month == 12: + infos.append("业绩预告期,注意排雷") + + # 元旦 + new_year = to_pd_timestamp(f"{the_date.year + 1}-01-01") + distance = count_interval(the_date, new_year) + if 0 < distance < consider_future_days: + infos.append(f"距离元旦还有{distance}天") + if month in (1, 2): + # 春节 + zh_date = ZhDate(lunar_year=the_date.year, lunar_month=1, lunar_day=1) + spring_date = zh_date.newyear + distance = count_interval(the_date, spring_date) + if 0 < distance < consider_future_days: + infos.append(f"距离春节还有{distance}天") + + # 两会 + # 三月初 + lianghui = to_pd_timestamp(f"{the_date.year}-03-01") + distance = count_interval(the_date, lianghui) + if 0 < distance < consider_future_days: + infos.append(f"距离两会还有{distance}天") + + # 年报发布 + if month in (3, 4): + infos.append("年报发布期,注意排雷") + + # 五一 + if month == 4: + wuyi = to_pd_timestamp(f"{the_date.year}-05-01") + distance = count_interval(the_date, wuyi) + if 0 < distance < consider_future_days: + infos.append(f"距离五一还有{distance}天") + + # 业绩发布 + if month in (7, 8): + infos.append("半年报发布期,注意排雷") + + if month == 9: + # 国庆 + shiyi = to_pd_timestamp(f"{the_date.year}-10-01") + distance = 
count_interval(the_date, shiyi) + if 0 < distance < consider_future_days: + infos.append(f"距离国庆还有{distance}天") + + msg = ",".join(infos) + return msg + + +def get_time_message(): + return {"timestamp": current_date(), "message": holiday_distance()} + + +if __name__ == "__main__": + print(get_time_message()) + +# the __all__ is generated +__all__ = ["holiday_distance", "get_time_message"] diff --git a/src/zvt/misc/zhdate.py b/src/zvt/misc/zhdate.py new file mode 100644 index 00000000..3ec4b24e --- /dev/null +++ b/src/zvt/misc/zhdate.py @@ -0,0 +1,261 @@ +""" +-*- coding: utf-8 -*- +thanks to https://github.com/CutePandaSh/zhdate +""" +from datetime import datetime, timedelta +from itertools import accumulate + +from zvt.misc.constants import CHINESEYEARCODE, CHINESENEWYEAR + + +class ZhDate: + def __init__(self, lunar_year, lunar_month, lunar_day, leap_month=False): + """初始化函数 + + Arguments: + lunar_year {int} -- 农历年 + lunar_month {int} -- 农历月份 + lunar_day {int} -- 农历日 + + Keyword Arguments: + leap_month {bool} -- 是否是在农历闰月中 (default: {False}) + """ + self.lunar_year = lunar_year + self.lunar_month = lunar_month + self.lunar_day = lunar_day + self.leap_month = leap_month + self.year_code = CHINESEYEARCODE[self.lunar_year - 1900] + self.newyear = datetime.strptime(CHINESENEWYEAR[self.lunar_year - 1900], "%Y%m%d") + if not ZhDate.validate(lunar_year, lunar_month, lunar_day, leap_month): + raise TypeError("农历日期不支持所谓“{}”,超出农历1900年1月1日至2100年12月29日,或日期不存在".format(self)) + + def to_datetime(self): + """农历日期转换称公历日期 + + Returns: + datetime -- 当前农历对应的公历日期 + """ + return self.newyear + timedelta(days=self.__days_passed()) + + @staticmethod + def from_datetime(dt): + """静态方法,从公历日期生成农历日期 + + Arguments: + dt {datetime} -- 公历的日期 + + Returns: + ZhDate -- 生成的农历日期对象 + """ + lunar_year = dt.year + # 如果还没有到农历正月初一 农历年份减去1 + lunar_year -= (datetime.strptime(CHINESENEWYEAR[lunar_year - 1900], "%Y%m%d") - dt).total_seconds() > 0 + # 当时农历新年时的日期对象 + newyear_dt = 
datetime.strptime(CHINESENEWYEAR[lunar_year - 1900], "%Y%m%d") + # 查询日期距离当年的春节差了多久 + days_passed = (dt - newyear_dt).days + # 被查询日期的年份码 + year_code = CHINESEYEARCODE[lunar_year - 1900] + # 取得本年的月份列表 + month_days = ZhDate.decode(year_code) + + for pos, days in enumerate(accumulate(month_days)): + if days_passed + 1 <= days: + month = pos + 1 + lunar_day = month_days[pos] - (days - days_passed) + 1 + break + + leap_month = False + if (year_code & 0xF) == 0 or month <= (year_code & 0xF): + lunar_month = month + else: + lunar_month = month - 1 + + if (year_code & 0xF) != 0 and month == (year_code & 0xF) + 1: + leap_month = True + + return ZhDate(lunar_year, lunar_month, lunar_day, leap_month) + + @staticmethod + def today(): + return ZhDate.from_datetime(datetime.now()) + + def __days_passed(self): + """私有方法,计算当前农历日期和当年农历新年之间的天数差值 + + Returns: + int -- 差值天数 + """ + month_days = ZhDate.decode(self.year_code) + # 当前农历年的闰月,为0表示无润叶 + month_leap = self.year_code & 0xF + + # 当年无闰月,或者有闰月但是当前月小于闰月 + if (month_leap == 0) or (self.lunar_month < month_leap): + days_passed_month = sum(month_days[: self.lunar_month - 1]) + # 当前不是闰月,并且当前月份和闰月相同 + elif (not self.leap_month) and (self.lunar_month == month_leap): + days_passed_month = sum(month_days[: self.lunar_month - 1]) + else: + days_passed_month = sum(month_days[: self.lunar_month]) + + return days_passed_month + self.lunar_day - 1 + + def chinese(self): + ZHNUMS = "〇一二三四五六七八九十" + zh_year = "" + for i in range(0, 4): + zh_year += ZHNUMS[int(str(self.lunar_year)[i])] + + if self.leap_month: + zh_month = "闰" + else: + zh_month = "" + + if self.lunar_month == 1: + zh_month += "正" + elif self.lunar_month == 12: + zh_month += "腊" + elif self.lunar_month <= 10: + zh_month += ZHNUMS[self.lunar_month] + else: + zh_month += "十{}".format(ZHNUMS[self.lunar_month - 10]) + + if self.lunar_day <= 10: + zh_day = "初{}".format(ZHNUMS[self.lunar_day]) + elif self.lunar_day < 20: + zh_day = "十{}".format(ZHNUMS[self.lunar_day - 10]) + elif 
self.lunar_day == 20: + zh_day = "二十" + elif self.lunar_day < 30: + zh_day = "廿{}".format(ZHNUMS[self.lunar_day - 20]) + else: + zh_day = "三十" + + year_tiandi = ZhDate.__tiandi(self.lunar_year - 1900 + 36) + + shengxiao = "鼠牛虎兔龙蛇马羊猴鸡狗猪" + + return "{}年{}月{} {}{}年".format(zh_year, zh_month, zh_day, year_tiandi, shengxiao[(self.lunar_year - 1900) % 12]) + + def __str__(self): + """打印字符串的方法 + + Returns: + str -- 标准格式农历日期字符串 + """ + return "农历{}年{}{}月{}日".format(self.lunar_year, "闰" if self.leap_month else "", self.lunar_month, self.lunar_day) + + def __repr__(self): + return self.__str__() + + def __eq__(self, another): + if not isinstance(another, ZhDate): + raise TypeError("比较必须都是ZhDate类型") + cond1 = self.lunar_year == another.lunar_year + cond2 = self.lunar_month == another.lunar_month + cond3 = self.lunar_day == another.lunar_day + cond4 = self.leap_month == another.leap_month + return cond1 and cond2 and cond3 and cond4 + + def __add__(self, another): + if not isinstance(another, int): + raise TypeError("加法只支持整数天数相加") + return ZhDate.from_datetime(self.to_datetime() + timedelta(days=another)) + + def __sub__(self, another): + if isinstance(another, int): + return ZhDate.from_datetime(self.to_datetime() - timedelta(days=another)) + elif isinstance(another, ZhDate): + return (self.to_datetime() - another.to_datetime()).days + elif isinstance(another, datetime): + return (self.to_datetime() - another).days + else: + raise TypeError("减法只支持整数,ZhDate, Datetime类型") + + """ + 以下为帮助函数 + """ + + @staticmethod + def __tiandi(anum): + tian = "甲乙丙丁戊己庚辛壬癸" + di = "子丑寅卯辰巳午未申酉戌亥" + return "{}{}".format(tian[anum % 10], di[anum % 12]) + + @staticmethod + def validate(year, month, day, leap): + """农历日期校验 + + Arguments: + year {int} -- 农历年份 + month {int} -- 农历月份 + day {int} -- 农历日期 + leap {bool} -- 农历是否为闰月日期 + + Returns: + bool -- 校验是否通过 + """ + # 年份低于1900,大于2100,或者月份不属于 1-12,或者日期不属于 1-30,返回校验失败 + if not (1900 <= year <= 2100 and 1 <= month <= 12 and 1 <= day <= 30): + return False 
+ + year_code = CHINESEYEARCODE[year - 1900] + + # 有闰月标志 + if leap: + if (year_code & 0xF) != month: # 年度闰月和校验闰月不一致的话,返回校验失败 + return False + elif day == 30: # 如果日期是30的话,直接返回年度代码首位是否为1,即闰月是否为大月 + return (year_code >> 16) == 1 + else: # 年度闰月和当前月份相同,日期不为30的情况,返回通过 + return True + elif day <= 29: # 非闰月,并且日期小于等于29,返回通过 + return True + else: # 非闰月日期为30,返回年度代码中的月份位是否为1,即是否为大月 + return ((year_code >> (12 - month) + 4) & 1) == 1 + + @staticmethod + def decode(year_code): + """解析年度农历代码函数 + + Arguments: + year_code {int} -- 从年度代码数组中获取的代码整数 + + Returns: + list[int, ] -- 当前年度代码解析以后形成的每月天数数组,已将闰月嵌入对应位置,即有闰月的年份返回的列表长度为13,否则为12 + """ + # 请问您为什么不在这么重要的地方写注释? + month_days = [] + for i in range(4, 16): + # 向右移动相应的位数 + # 1 这个数只有一位,与任何数进行 按位与 都只能获得其 + # 从后往前第一位,对!是获得这一位 + month_days.insert(0, 30 if (year_code >> i) & 1 else 29) + + # 0xf 即 15 即二进制的 1111 + # 所以 1111 与任何数进行 按位与 + # 都将获得其最后四位,对!是获得这最后四位 + # 后四位非0则表示有闰月(多一月),则插入一次月份 + # 而首四位表示闰月的天数 + if year_code & 0xF: + month_days.insert((year_code & 0xF), 30 if year_code >> 16 else 29) + + # 返回一个列表 + return month_days + + @staticmethod + def month_days(year): + """根据年份返回当前农历月份天数list + + Arguments: + year {int} -- 1900到2100的之间的整数 + + Returns: + [int] -- 农历年份所对应的农历月份天数列表 + """ + return ZhDate.decode(CHINESEYEARCODE[year - 1900]) + + +# the __all__ is generated +__all__ = [] diff --git a/src/zvt/ml/__init__.py b/src/zvt/ml/__init__.py new file mode 100644 index 00000000..bf9472ed --- /dev/null +++ b/src/zvt/ml/__init__.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule lables +from .lables import * +from .lables import __all__ as _lables_all + +__all__ += _lables_all + +# import all from submodule ml +from .ml import * +from .ml import __all__ as _ml_all + +__all__ += _ml_all diff --git a/src/zvt/ml/lables.py b/src/zvt/ml/lables.py new file 
mode 100644 index 00000000..af6ffe7a --- /dev/null +++ b/src/zvt/ml/lables.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +from enum import Enum + + +class BehaviorCategory(Enum): + # 上涨 + up = 1 + # 下跌 + down = -1 + + +class RelativePerformance(Enum): + # 表现比90%好 + best = 0.9 + ordinary = 0.5 + poor = 0 + + +# the __all__ is generated +__all__ = ["BehaviorCategory", "RelativePerformance"] diff --git a/src/zvt/ml/ml.py b/src/zvt/ml/ml.py new file mode 100644 index 00000000..296e1cdd --- /dev/null +++ b/src/zvt/ml/ml.py @@ -0,0 +1,244 @@ +# -*- coding: utf-8 -*- +import logging +from typing import Union, Type, List + +import pandas as pd +from sklearn.linear_model import LinearRegression, SGDRegressor +from sklearn.pipeline import make_pipeline +from sklearn.preprocessing import StandardScaler + +from zvt.api.kdata import default_adjust_type, get_kdata +from zvt.contract import IntervalLevel, AdjustType +from zvt.contract import TradableEntity +from zvt.contract.drawer import Drawer +from zvt.domain import Stock +from zvt.factors.transformers import MaTransformer +from zvt.ml.lables import RelativePerformance, BehaviorCategory +from zvt.utils.pd_utils import group_by_entity_id, normalize_group_compute_result, pd_is_not_null +from zvt.utils.time_utils import to_pd_timestamp + +logger = logging.getLogger(__name__) + + +def cal_change(s: pd.Series, predict_range): + return s.pct_change(periods=-predict_range) + + +def cal_behavior_cls(s: pd.Series, predict_range): + return s.pct_change(periods=-predict_range).apply( + lambda x: BehaviorCategory.up.value if x > 0 else BehaviorCategory.down.value + ) + + +def cal_predict(s: pd.Series, predict_range): + return s.shift(periods=-predict_range) + + +def cal_relative_performance(s: pd.Series): + if s >= RelativePerformance.best.value: + return RelativePerformance.best + if s >= RelativePerformance.ordinary.value: + return RelativePerformance.ordinary + if s >= RelativePerformance.poor.value: + return RelativePerformance.poor + 
class MLMachine(object):
    """Base machine-learning pipeline over tradable-entity kdata.

    Loads kdata for a time range, builds features and labels, splits them at
    ``predict_start_timestamp`` into training/testing sets, and exposes
    :meth:`train`, :meth:`predict` and :meth:`draw_result`. Subclasses must
    set :attr:`entity_schema` and implement :meth:`build_feature`.
    """

    #: tradable entity schema the machine works on (set by subclasses)
    entity_schema: Type[TradableEntity] = None

    def __init__(
        self,
        entity_ids: List[str] = None,
        start_timestamp: Union[str, pd.Timestamp] = "2015-01-01",
        end_timestamp: Union[str, pd.Timestamp] = "2021-12-01",
        predict_start_timestamp: Union[str, pd.Timestamp] = "2021-06-01",
        predict_steps: int = 20,
        level: Union[IntervalLevel, str] = IntervalLevel.LEVEL_1DAY,
        adjust_type: Union[AdjustType, str] = None,
        data_provider: str = None,
        label_method: str = "raw",
    ) -> None:
        """
        :param entity_ids: entity ids to train/predict on
        :param start_timestamp: start of the whole data range
        :param end_timestamp: end of the whole data range
        :param predict_start_timestamp: split point between training and testing data
        :param predict_steps: how many bars ahead the label looks
        :param level: kdata interval level
        :param adjust_type: price adjust type; defaults per entity type
        :param data_provider: kdata provider
        :param label_method: raw, change, or behavior_cls
        :raises ValueError: if the timestamps are not ordered or no kdata is found
        """
        super().__init__()
        self.entity_ids = entity_ids
        self.start_timestamp = to_pd_timestamp(start_timestamp)
        self.end_timestamp = to_pd_timestamp(end_timestamp)
        self.predict_start_timestamp = to_pd_timestamp(predict_start_timestamp)
        # explicit raise instead of assert: asserts are stripped under -O
        if not (self.start_timestamp < self.predict_start_timestamp < self.end_timestamp):
            raise ValueError(
                "require start_timestamp < predict_start_timestamp < end_timestamp, "
                "got {}, {}, {}".format(self.start_timestamp, self.predict_start_timestamp, self.end_timestamp)
            )
        self.predict_steps = predict_steps

        self.level = level
        if not adjust_type:
            adjust_type = default_adjust_type(entity_type=self.entity_schema.__name__)
        self.adjust_type = adjust_type

        self.data_provider = data_provider
        self.label_method = label_method

        self.kdata_df = self.build_kdata()
        if not pd_is_not_null(self.kdata_df):
            # fixed typo ("not kdta") and replaced `assert False` with a real error
            logger.error("no kdata for %s", self.entity_ids)
            raise ValueError("no kdata for {}".format(self.entity_ids))

        self.feature_df = self.build_feature(self.entity_ids, self.start_timestamp, self.end_timestamp)
        # drop na in feature
        self.feature_df = self.feature_df.dropna()
        self.feature_names = list(set(self.feature_df.columns) - {"entity_id", "timestamp"})
        self.feature_df = self.feature_df.loc[:, self.feature_names]

        self.label_ser = self.build_label()
        # keep same index with feature df
        self.label_ser = self.label_ser.loc[self.feature_df.index]
        self.label_name = self.label_ser.name

        self.training_X, self.training_y, self.testing_X, self.testing_y = self.split_data()

        logger.info(self.training_X)
        logger.info(self.training_y)

        self.model = None
        self.pred_y = None

    def split_data(self):
        """Split features/labels at ``predict_start_timestamp`` into train/test sets."""
        training_x = self.feature_df[self.feature_df.index.get_level_values("timestamp") < self.predict_start_timestamp]
        training_y = self.label_ser[self.label_ser.index.get_level_values("timestamp") < self.predict_start_timestamp]

        testing_x = self.feature_df[self.feature_df.index.get_level_values("timestamp") >= self.predict_start_timestamp]
        testing_y = self.label_ser[self.label_ser.index.get_level_values("timestamp") >= self.predict_start_timestamp]
        return training_x, training_y, testing_x, testing_y

    def build_kdata(self):
        """Load close-price kdata indexed by (entity_id, timestamp)."""
        columns = ["entity_id", "timestamp", "close"]
        return get_kdata(
            entity_ids=self.entity_ids,
            start_timestamp=self.start_timestamp,
            end_timestamp=self.end_timestamp,
            columns=columns,
            level=self.level,
            adjust_type=self.adjust_type,
            provider=self.data_provider,
            index=["entity_id", "timestamp"],
            drop_index_col=True,
        )

    def build_label(self):
        """Compute the label series from kdata closes, grouped by entity.

        :raises ValueError: for an unsupported ``label_method``
        """
        label_name = f"y_{self.predict_steps}"
        # dispatch table replaces the duplicated if/elif branches
        label_funcs = {
            "raw": cal_predict,
            "change": cal_change,
            "behavior_cls": cal_behavior_cls,
        }
        label_func = label_funcs.get(self.label_method)
        if label_func is None:
            # explicit error instead of `assert False` (stripped under -O)
            raise ValueError(f"unsupported label_method: {self.label_method}")
        y = (
            group_by_entity_id(self.kdata_df["close"])
            .apply(lambda x: label_func(x, self.predict_steps))
            .rename(label_name)
        )
        y = normalize_group_compute_result(y)

        return y

    def train(self, model=None, **params):
        """Fit *model* on the training set and keep it on ``self.model``.

        :param model: a scikit-learn style estimator; defaults to a fresh
            ``LinearRegression``. The original used ``model=LinearRegression()``
            as the default, which shares (and mutates) a single estimator
            instance across all calls — a fresh instance per call avoids that.
        :param params: extra keyword arguments forwarded to ``model.fit``
        :return: the fitted model
        """
        if model is None:
            model = LinearRegression()
        self.model = model.fit(self.training_X, self.training_y, **params)
        return self.model

    def draw_result(self, entity_id):
        """Plot predicted vs actual results for one entity."""
        if self.label_method == "raw":
            df = self.kdata_df.loc[[entity_id], ["close"]].copy()

            pred_df = self.pred_y.to_frame(name="pred_close")
            # shift predictions forward so they line up with the bar they predict
            pred_df = pred_df.loc[[entity_id], :].shift(self.predict_steps)

            drawer = Drawer(
                main_df=df,
                factor_df_list=[pred_df],
            )
            drawer.draw_line(show=True)
        else:
            pred_df = self.pred_y.to_frame(name="pred_result").loc[[entity_id], :]
            df = self.testing_y.to_frame(name="real_result").loc[[entity_id], :].join(pred_df, how="outer")

            drawer = Drawer(main_df=df)
            drawer.draw_table()

    def predict(self):
        """Run the trained model on the testing set; stores ``self.pred_y``."""
        predictions = self.model.predict(self.testing_X)
        self.pred_y = pd.Series(data=predictions, index=self.testing_y.index)
        # explained_variance_score(self.testing_y, self.pred_y)
        # mean_squared_error(self.testing_y, self.pred_y)

    def build_feature(
        self, entity_ids: List[str], start_timestamp: pd.Timestamp, end_timestamp: pd.Timestamp
    ) -> pd.DataFrame:
        """
        result df format

                                  col1    col2    col3    ...
        entity_id    timestamp
                                  1.2     0.5     0.3     ...
                                  1.0     0.7     0.2     ...

        :param entity_ids: entity id list
        :param start_timestamp:
        :param end_timestamp:
        :rtype: pd.DataFrame
        """
        raise NotImplementedError


class StockMLMachine(MLMachine):
    """MLMachine specialised for stocks."""

    entity_schema = Stock


class MaStockMLMachine(StockMLMachine):
    """Stock machine whose features are moving averages of the close price."""

    def build_feature(
        self, entity_ids: List[str], start_timestamp: pd.Timestamp, end_timestamp: pd.Timestamp
    ) -> pd.DataFrame:
        """
        NOTE: the parameters are unused here; features are derived from the
        already-loaded ``self.kdata_df``.

        :param entity_ids:
        :param start_timestamp:
        :param end_timestamp:
        :return: dataframe of moving-average features
        """
        t = MaTransformer(windows=[5, 10, 120, 250])
        df = t.transform(self.kdata_df)
        return df


if __name__ == "__main__":
    machine = MaStockMLMachine(entity_ids=["stock_sz_000001"])
    reg = make_pipeline(StandardScaler(), SGDRegressor(max_iter=1000, tol=1e-3))
    machine.train(model=reg)
    machine.predict()
    machine.draw_result(entity_id="stock_sz_000001")

# the __all__ is generated
__all__ = [
    "cal_change",
    "cal_behavior_cls",
    "cal_predict",
    "cal_relative_performance",
    "MLMachine",
    "StockMLMachine",
    "MaStockMLMachine",
]
+ "MaStockMLMachine", +] diff --git a/zvt/plugin.py b/src/zvt/plugin.py similarity index 53% rename from zvt/plugin.py rename to src/zvt/plugin.py index d3cae64e..38809760 100644 --- a/zvt/plugin.py +++ b/src/zvt/plugin.py @@ -7,10 +7,10 @@ def main(): parser = argparse.ArgumentParser() - parser.add_argument('--entity', help='entity name', default='future') - parser.add_argument('--prefix', help='project prefix', default='zvt') - parser.add_argument('--dir', help='project directory', default='.') - parser.add_argument('--providers', help='providers', default=['joinquant'], nargs='+') + parser.add_argument("--entity", help="entity name", default="future") + parser.add_argument("--prefix", help="project prefix", default="zvt") + parser.add_argument("--dir", help="project directory", default=".") + parser.add_argument("--providers", help="providers", default=["joinquant"], nargs="+") args = parser.parse_args() @@ -23,12 +23,12 @@ def main(): def export(): parser = argparse.ArgumentParser() - parser.add_argument('--dir', help='export directory', default='.') + parser.add_argument("--dir", help="export directory", default=".") args = parser.parse_args() dir_path = args.dir gen_exports(dir_path=dir_path) -if __name__ == '__main__': - gen_plugin_project(dir_path='../../', entity_type='coin', providers=['ccxt']) +if __name__ == "__main__": + gen_plugin_project(dir_path="../../../", entity_type="macro", providers=["zvt"]) main() diff --git a/src/zvt/recorders/__init__.py b/src/zvt/recorders/__init__.py new file mode 100644 index 00000000..40099337 --- /dev/null +++ b/src/zvt/recorders/__init__.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- + +CHINA_STOCK_MAIN_INDEX = [ + # # 聚宽编码 + # # 市场通编码 市场通名称 + # # 310001 沪股通 + # # 310002 深股通 + # # 310003 港股通(沪) + # # 310004 港股通(深) + { + "id": "index_cn_310001", + "entity_id": "index_cn_310001", + "code": "310001", + "name": "沪股通", + "timestamp": "2014-11-17", + "exchange": "cn", + "entity_type": "index", + "category": "other", + }, + { 
+ "id": "index_cn_310002", + "entity_id": "index_cn_310002", + "code": "310002", + "name": "深股通", + "timestamp": "2014-11-17", + "exchange": "cn", + "entity_type": "index", + "category": "other", + }, + { + "id": "index_cn_310003", + "entity_id": "index_cn_310003", + "code": "310003", + "name": "港股通(沪)", + "timestamp": "2014-11-17", + "exchange": "cn", + "entity_type": "index", + "category": "other", + }, + { + "id": "index_cn_310004", + "entity_id": "index_cn_310004", + "code": "310004", + "name": "港股通(深)", + "timestamp": "2014-11-17", + "exchange": "cn", + "entity_type": "index", + "category": "other", + }, +] + + +def init_main_index(provider="exchange"): + from zvt.utils.time_utils import to_pd_timestamp + import pandas as pd + from zvt.contract.api import df_to_db + from zvt.domain.meta import Index + + for item in CHINA_STOCK_MAIN_INDEX: + item["timestamp"] = to_pd_timestamp(item["timestamp"]) + df = pd.DataFrame(CHINA_STOCK_MAIN_INDEX) + # print(df) + df_to_db(df=df, data_schema=Index, provider=provider, force_update=False) + + +init_main_index(provider="exchange") + + +# the __all__ is generated +__all__ = ["init_main_index"] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule sina +from .sina import * +from .sina import __all__ as _sina_all + +__all__ += _sina_all + +# import all from submodule jqka +from .jqka import * +from .jqka import __all__ as _jqka_all + +__all__ += _jqka_all + +# import all from submodule consts +from .consts import * +from .consts import __all__ as _consts_all + +__all__ += _consts_all + +# import all from submodule eastmoney +from .eastmoney import * +from .eastmoney import __all__ as _eastmoney_all + +__all__ += _eastmoney_all + +# import all from submodule joinquant +from .joinquant import * +from .joinquant import __all__ as _joinquant_all + +__all__ += _joinquant_all + +# import all from submodule exchange +from 
.exchange import * +from .exchange import __all__ as _exchange_all + +__all__ += _exchange_all + +# import all from submodule wb +from .wb import * +from .wb import __all__ as _wb_all + +__all__ += _wb_all + +# import all from submodule em +from .em import * +from .em import __all__ as _em_all + +__all__ += _em_all diff --git a/zvt/recorders/consts.py b/src/zvt/recorders/consts.py similarity index 90% rename from zvt/recorders/consts.py rename to src/zvt/recorders/consts.py index 52e7c337..b39b3c28 100644 --- a/zvt/recorders/consts.py +++ b/src/zvt/recorders/consts.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- from zvt.utils.utils import chrome_copy_header_to_dict -SSE_KDATA_HEADER = chrome_copy_header_to_dict(''' +SSE_KDATA_HEADER = chrome_copy_header_to_dict( + """ Host: yunhq.sse.com.cn:32041 Connection: keep-alive Pragma: no-cache @@ -12,9 +13,11 @@ Accept-Encoding: gzip, deflate Accept-Language: zh-CN,zh;q=0.8,en;q=0.6 Cookie: yfx_c_g_u_id_10000042=_ck17072000172016360411059933357; yfx_f_l_v_t_10000042=f_t_1500481040618__r_t_1507560823182__v_t_1507561607501__r_c_1; VISITED_MENU=%5B%228451%22%2C%228453%22%5D -''') +""" +) -DEFAULT_SH_HEADER = chrome_copy_header_to_dict(''' +DEFAULT_SH_HEADER = chrome_copy_header_to_dict( + """ Accept: */* Accept-Encoding: gzip, deflate Accept-Language: zh-CN,zh;q=0.9,en;q=0.8 @@ -23,9 +26,11 @@ Host: query.sse.com.cn Referer: http://www.sse.com.cn/ User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36 -''') +""" +) -DEFAULT_SZ_HEADER = chrome_copy_header_to_dict(''' +DEFAULT_SZ_HEADER = chrome_copy_header_to_dict( + """ Accept:text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8 Accept-Encoding:gzip, deflate, sdch Accept-Language:zh-CN,zh;q=0.8,en;q=0.6 @@ -34,9 +39,11 @@ Referer:http://www.szse.cn/main/marketdata/jypz/colist/ Upgrade-Insecure-Requests:1 User-Agent:Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.76 Mobile Safari/537.36 -''') +""" +) -DEFAULT_TICK_HEADER = chrome_copy_header_to_dict(''' +DEFAULT_TICK_HEADER = chrome_copy_header_to_dict( + """ Host: market.finance.sina.com.cn Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8 Accept-Encoding: gzip, deflate, sdch @@ -45,9 +52,11 @@ Referer:market.finance.sina.com.cn Upgrade-Insecure-Requests: 1 User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36 -''') +""" +) -DEFAULT_KDATA_HEADER = chrome_copy_header_to_dict(''' +DEFAULT_KDATA_HEADER = chrome_copy_header_to_dict( + """ Accept:text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8 Accept-Encoding:gzip, deflate, sdch Accept-Language:zh-CN,zh;q=0.8,en;q=0.6 @@ -57,9 +66,11 @@ Referer:vip.stock.finance.sina.com.cn Upgrade-Insecure-Requests:1 User-Agent:Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.76 Mobile Safari/537.36 -''') +""" +) -TONGHUASHUN_GN_HEADER = chrome_copy_header_to_dict(''' +TONGHUASHUN_GN_HEADER = chrome_copy_header_to_dict( + """ Accept:text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8 Accept-Encoding:gzip, deflate, sdch Accept-Language:zh-CN,zh;q=0.8,en;q=0.6 @@ -69,9 +80,11 @@ Host:q.10jqka.com.cn Upgrade-Insecure-Requests:1 User-Agent:Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.76 Mobile Safari/537.36 -''') +""" +) -TONGHUASHUN_KDATA_HEADER = chrome_copy_header_to_dict(''' +TONGHUASHUN_KDATA_HEADER = chrome_copy_header_to_dict( + """ Accept-Encoding: gzip, deflate Accept-Language: zh-CN,zh;q=0.8,en;q=0.6 User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36 @@ -79,9 +92,11 @@ Referer: http://stockpage.10jqka.com.cn/HQ_v4.html Cookie: 
__utma=156575163.1843700306.1488352720.1499234323.1502172029.4; __utmc=156575163; __utmz=156575163.1488352720.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); spversion=20130314; Hm_lvt_78c58f01938e4d85eaf619eae71b4ed1=1507300869; Hm_lpvt_78c58f01938e4d85eaf619eae71b4ed1=1508464591; historystock=603189%7C*%7C300295%7C*%7C600839%7C*%7C000338%7C*%7C002194; log=; v=AREjaxfGLPcJoUDd5wHRp1QiKRaufoSAL_MpD_OlDDL35T_CO86VwL9CPa2D Connection: keep-alive -''') +""" +) -DEFAULT_BALANCE_SHEET_HEADER = chrome_copy_header_to_dict(''' +DEFAULT_BALANCE_SHEET_HEADER = chrome_copy_header_to_dict( + """ Host: money.finance.sina.com.cn User-Agent: Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:54.0) Gecko/20100101 Firefox/54.0 Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8 @@ -91,9 +106,11 @@ Cookie: U_TRS1=000000be.c95848c3.59817e10.a54886e2; U_TRS2=000000be.c96a48c3.59817e10.a91795e2; UOR=,vip.stock.finance.sina.com.cn,; ULV=1501658645426:2:2:2:182.148.114.190_1501658642.469995:1501658642409; SINAGLOBAL=182.148.114.190_1501658642.469991; Apache=182.148.114.190_1501658642.469995; _s_upa=1; SUB=_2A250hQ5nDeRhGedL4lQQ8ivPzziIHXVX83ivrDV_PUNbm9BeLXigkW8-niaOks2yNkw8lYo-TvoqGk6nRA..; SUBP=0033WrSXqPxfM725Ws9jqgMF55529P9D9W5J7vXaOMNYJQ0d27EZPWIF5NHD95QpSK.ceKzfe0BXWs4Dqcj.i--fi-zRiKnEi--fiKLhi-iWi--Xi-isi-88i--Ri-2piKyh; SCF=At4whqZZyjTBTvcLfR0tyqIpfHUX2VOK-qvBVHkbyahiCVcr4-8NjJQGHwCaTtkQJ0SPmrzvZARwtEkL1I_46z8.; ALF=1533194679; sso_info=v02m6alo5qztbaYloWum6akpp2WpaSPk4S1jpOYsYyjlLONg5DA; FINANCE2=f7634b1d12920e2763ffc0dc463ef6bb Connection: keep-alive Upgrade-Insecure-Requests: 1 -''') +""" +) -DEFAULT_SH_SUMMARY_HEADER = chrome_copy_header_to_dict(''' +DEFAULT_SH_SUMMARY_HEADER = chrome_copy_header_to_dict( + """ Host: query.sse.com.cn Connection: keep-alive User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36 @@ -102,9 +119,11 @@ Accept-Encoding: gzip, deflate Accept-Language: 
zh-CN,zh;q=0.8,en;q=0.6 Cookie: yfx_c_g_u_id_10000042=_ck17122009304714819234313401740; VISITED_COMPANY_CODE=%5B%22000016%22%5D; VISITED_INDEX_CODE=%5B%22000016%22%5D; yfx_f_l_v_t_10000042=f_t_1513733447386__r_t_1515716891222__v_t_1515721033042__r_c_3; VISITED_MENU=%5B%228464%22%2C%229666%22%2C%229668%22%2C%229669%22%2C%228454%22%2C%228460%22%2C%229665%22%2C%228459%22%2C%229692%22%2C%228451%22%2C%228466%22%5D -''') +""" +) -DEFAULT_SH_ETF_LIST_HEADER = chrome_copy_header_to_dict(''' +DEFAULT_SH_ETF_LIST_HEADER = chrome_copy_header_to_dict( + """ Host: query.sse.com.cn User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36 Accept: */* @@ -113,9 +132,22 @@ Accept-Language: zh-CN,zh;q=0.9 Cookie: yfx_c_g_u_id_10000042=_ck19062609443812815766114343798; VISITED_COMPANY_CODE=%5B%22510300%22%5D; VISITED_FUND_CODE=%5B%22510300%22%5D; VISITED_MENU=%5B%228307%22%2C%228823%22%2C%228547%22%2C%228556%22%2C%228549%22%2C%2210848%22%2C%228550%22%5D; yfx_f_l_v_t_10000042=f_t_1561513478278__r_t_1561692626758__v_t_1561695738302__r_c_1 Connection: keep-alive -''') +""" +) -EASTMONEY_ETF_NET_VALUE_HEADER = chrome_copy_header_to_dict(''' +EASTMONEY_ETF_NET_VALUE_HEADER = chrome_copy_header_to_dict( + """ User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36 Referer: http://fund.eastmoney.com/ -''') +""" +) + +DEFAULT_HEADER = chrome_copy_header_to_dict( + """ +User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36 +""" +) + + +# the __all__ is generated +__all__ = [] diff --git a/src/zvt/recorders/eastmoney/__init__.py b/src/zvt/recorders/eastmoney/__init__.py new file mode 100644 index 00000000..2eb5778f --- /dev/null +++ b/src/zvt/recorders/eastmoney/__init__.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*-# + + +# the __all__ is generated +__all__ = [] + 
+# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule holder +from .holder import * +from .holder import __all__ as _holder_all + +__all__ += _holder_all + +# import all from submodule trading +from .trading import * +from .trading import __all__ as _trading_all + +__all__ += _trading_all + +# import all from submodule finance +from .finance import * +from .finance import __all__ as _finance_all + +__all__ += _finance_all + +# import all from submodule common +from .common import * +from .common import __all__ as _common_all + +__all__ += _common_all + +# import all from submodule dividend_financing +from .dividend_financing import * +from .dividend_financing import __all__ as _dividend_financing_all + +__all__ += _dividend_financing_all + +# import all from submodule meta +from .meta import * +from .meta import __all__ as _meta_all + +__all__ += _meta_all diff --git a/zvt/recorders/eastmoney/common.py b/src/zvt/recorders/eastmoney/common.py similarity index 60% rename from zvt/recorders/eastmoney/common.py rename to src/zvt/recorders/eastmoney/common.py index 074cfd74..a80aff44 100644 --- a/zvt/recorders/eastmoney/common.py +++ b/src/zvt/recorders/eastmoney/common.py @@ -5,33 +5,34 @@ from zvt.contract.api import get_data_count, get_data from zvt.contract.recorder import TimestampsDataRecorder, TimeSeriesDataRecorder +from zvt.domain import CompanyType +from zvt.domain.meta.stock_meta import StockDetail from zvt.utils.time_utils import to_pd_timestamp -from zvt.domain import CompanyType, Stock, StockDetail logger = logging.getLogger(__name__) class ApiWrapper(object): - def request(self, url=None, method='post', param=None, path_fields=None): + def request(self, url=None, method="post", param=None, path_fields=None): raise NotImplementedError def get_fc(security_item): - if security_item.exchange == 'sh': + if security_item.exchange == "sh": fc = 
"{}01".format(security_item.code) - if security_item.exchange == 'sz': + if security_item.exchange == "sz": fc = "{}02".format(security_item.code) return fc def get_company_type(stock_domain: StockDetail): - industries = stock_domain.industries.split(',') - if ('银行' in industries) or ('信托' in industries): + industries = stock_domain.industries.split(",") + if ("银行" in industries) or ("信托" in industries): return CompanyType.yinhang - if '保险' in industries: + if "保险" in industries: return CompanyType.baoxian - if '证券' in industries: + if "证券" in industries: return CompanyType.quanshang return CompanyType.qiye @@ -51,39 +52,37 @@ def company_type_flag(security_item): except Exception as e: logger.warning(e) - param = { - "color": "w", - "fc": get_fc(security_item) - } + param = {"color": "w", "fc": get_fc(security_item)} - resp = requests.post('https://emh5.eastmoney.com/api/CaiWuFenXi/GetCompanyType', json=param) + resp = requests.post("https://emh5.eastmoney.com/api/CaiWuFenXi/GetCompanyType", json=param) - ct = resp.json().get('Result').get('CompanyType') + ct = resp.json().get("Result").get("CompanyType") logger.warning("{} not catching company type:{}".format(security_item, ct)) return ct -def call_eastmoney_api(url=None, method='post', param=None, path_fields=None): - if method == 'post': +def call_eastmoney_api(url=None, method="post", param=None, path_fields=None): + if method == "post": resp = requests.post(url, json=param) - resp.encoding = 'utf8' + resp.encoding = "utf-8" try: - origin_result = resp.json().get('Result') + origin_result = resp.json().get("Result") except Exception as e: - logger.exception('code:{},content:{}'.format(resp.status_code, resp.text)) + logger.exception("code:{},content:{}".format(resp.status_code, resp.text)) raise e if path_fields: the_data = get_from_path_fields(origin_result, path_fields) if not the_data: logger.warning( - "url:{},param:{},origin_result:{},could not get data for nested_fields:{}".format(url, param, - 
origin_result, - path_fields)) + "url:{},param:{},origin_result:{},could not get data for nested_fields:{}".format( + url, param, origin_result, path_fields + ) + ) return the_data return origin_result @@ -100,12 +99,12 @@ def get_from_path_fields(the_json, path_fields): class EastmoneyApiWrapper(ApiWrapper): - def request(self, url=None, method='post', param=None, path_fields=None): + def request(self, url=None, method="post", param=None, path_fields=None): return call_eastmoney_api(url=url, method=method, param=param, path_fields=path_fields) class BaseEastmoneyRecorder(object): - request_method = 'post' + request_method = "post" path_fields = None api_wrapper = EastmoneyApiWrapper() @@ -117,44 +116,43 @@ def record(self, entity_item, start, end, size, timestamps): original_list = [] for the_timestamp in timestamps: param = self.generate_request_param(entity_item, start, end, size, the_timestamp) - tmp_list = self.api_wrapper.request(url=self.url, param=param, method=self.request_method, - path_fields=self.path_fields) + tmp_list = self.api_wrapper.request( + url=self.url, param=param, method=self.request_method, path_fields=self.path_fields + ) self.logger.info( - "record {} for entity_id:{},timestamp:{}".format( - self.data_schema, entity_item.id, the_timestamp)) + "record {} for entity_id:{},timestamp:{}".format(self.data_schema, entity_item.id, the_timestamp) + ) # fill timestamp field for tmp in tmp_list: tmp[self.get_evaluated_time_field()] = the_timestamp original_list += tmp_list - if len(original_list) == self.batch_size: + if len(original_list) == 50: break return original_list else: param = self.generate_request_param(entity_item, start, end, size, None) - return self.api_wrapper.request(url=self.url, param=param, method=self.request_method, - path_fields=self.path_fields) + return self.api_wrapper.request( + url=self.url, param=param, method=self.request_method, path_fields=self.path_fields + ) class 
EastmoneyTimestampsDataRecorder(BaseEastmoneyRecorder, TimestampsDataRecorder): - entity_provider = 'joinquant' + entity_provider = "eastmoney" entity_schema = StockDetail - provider = 'eastmoney' + provider = "eastmoney" timestamps_fetching_url = None timestamp_list_path_fields = None timestamp_path_fields = None def init_timestamps(self, entity): - param = { - "color": "w", - "fc": get_fc(entity) - } + param = {"color": "w", "fc": get_fc(entity)} - timestamp_json_list = call_eastmoney_api(url=self.timestamps_fetching_url, - path_fields=self.timestamp_list_path_fields, - param=param) + timestamp_json_list = call_eastmoney_api( + url=self.timestamps_fetching_url, path_fields=self.timestamp_list_path_fields, param=param + ) if self.timestamp_path_fields and timestamp_json_list: timestamps = [get_from_path_fields(data, self.timestamp_path_fields) for data in timestamp_json_list] @@ -163,21 +161,16 @@ def init_timestamps(self, entity): class EastmoneyPageabeDataRecorder(BaseEastmoneyRecorder, TimeSeriesDataRecorder): - entity_provider = 'joinquant' + entity_provider = "eastmoney" entity_schema = StockDetail - provider = 'eastmoney' + provider = "eastmoney" page_url = None def get_remote_count(self, security_item): - param = { - "color": "w", - "fc": get_fc(security_item), - "pageNum": 1, - "pageSize": 1 - } - return call_eastmoney_api(self.page_url, param=param, path_fields=['TotalCount']) + param = {"color": "w", "fc": get_fc(security_item), "pageNum": 1, "pageSize": 1} + return call_eastmoney_api(self.page_url, param=param, path_fields=["TotalCount"]) def evaluate_start_end_size_timestamps(self, entity): remote_count = self.get_remote_count(entity) @@ -186,8 +179,9 @@ def evaluate_start_end_size_timestamps(self, entity): return None, None, 0, None # get local count - local_count = get_data_count(data_schema=self.data_schema, session=self.session, - filters=[self.data_schema.entity_id == entity.id]) + local_count = get_data_count( + data_schema=self.data_schema, 
session=self.session, filters=[self.data_schema.entity_id == entity.id] + ) # FIXME:the > case if local_count >= remote_count: return None, None, 0, None @@ -198,41 +192,38 @@ def generate_request_param(self, security_item, start, end, size, timestamp): return { "color": "w", "fc": get_fc(security_item), - 'pageNum': 1, + "pageNum": 1, # just get more for some fixed data - 'pageSize': size + 10 + "pageSize": size + 10, } class EastmoneyMoreDataRecorder(BaseEastmoneyRecorder, TimeSeriesDataRecorder): - entity_provider = 'joinquant' + entity_provider = "eastmoney" entity_schema = StockDetail - provider = 'eastmoney' + provider = "eastmoney" def get_remote_latest_record(self, security_item): - param = { - "color": "w", - "fc": get_fc(security_item), - "pageNum": 1, - "pageSize": 1 - } + param = {"color": "w", "fc": get_fc(security_item), "pageNum": 1, "pageSize": 1} results = call_eastmoney_api(self.url, param=param, path_fields=self.path_fields) _, result = self.generate_domain(security_item, results[0]) return result def evaluate_start_end_size_timestamps(self, entity): # get latest record - latest_record = get_data(entity_id=entity.id, - provider=self.provider, - data_schema=self.data_schema, - order=self.data_schema.timestamp.desc(), limit=1, - return_type='domain', - session=self.session) + latest_record = get_data( + entity_id=entity.id, + provider=self.provider, + data_schema=self.data_schema, + order=self.data_schema.timestamp.desc(), + limit=1, + return_type="domain", + session=self.session, + ) if latest_record: remote_record = self.get_remote_latest_record(entity) - if not remote_record or ( - latest_record[0].id == remote_record.id): + if not remote_record or (latest_record[0].id == remote_record.id): return None, None, 0, None else: return None, None, 10, None @@ -240,9 +231,20 @@ def evaluate_start_end_size_timestamps(self, entity): return None, None, 1000, None def generate_request_param(self, security_item, start, end, size, timestamp): - return { - 
"color": "w", - "fc": get_fc(security_item), - 'pageNum': 1, - 'pageSize': size - } + return {"color": "w", "fc": get_fc(security_item), "pageNum": 1, "pageSize": size} + + +# the __all__ is generated +__all__ = [ + "ApiWrapper", + "get_fc", + "get_company_type", + "company_type_flag", + "call_eastmoney_api", + "get_from_path_fields", + "EastmoneyApiWrapper", + "BaseEastmoneyRecorder", + "EastmoneyTimestampsDataRecorder", + "EastmoneyPageabeDataRecorder", + "EastmoneyMoreDataRecorder", +] diff --git a/src/zvt/recorders/eastmoney/dividend_financing/__init__.py b/src/zvt/recorders/eastmoney/dividend_financing/__init__.py new file mode 100644 index 00000000..1ad4e6d7 --- /dev/null +++ b/src/zvt/recorders/eastmoney/dividend_financing/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule eastmoney_rights_issue_detail_recorder +from .eastmoney_rights_issue_detail_recorder import * +from .eastmoney_rights_issue_detail_recorder import __all__ as _eastmoney_rights_issue_detail_recorder_all + +__all__ += _eastmoney_rights_issue_detail_recorder_all + +# import all from submodule eastmoney_dividend_detail_recorder +from .eastmoney_dividend_detail_recorder import * +from .eastmoney_dividend_detail_recorder import __all__ as _eastmoney_dividend_detail_recorder_all + +__all__ += _eastmoney_dividend_detail_recorder_all + +# import all from submodule eastmoney_spo_detail_recorder +from .eastmoney_spo_detail_recorder import * +from .eastmoney_spo_detail_recorder import __all__ as _eastmoney_spo_detail_recorder_all + +__all__ += _eastmoney_spo_detail_recorder_all + +# import all from submodule eastmoney_dividend_financing_recorder +from .eastmoney_dividend_financing_recorder import * +from .eastmoney_dividend_financing_recorder import __all__ as 
_eastmoney_dividend_financing_recorder_all + +__all__ += _eastmoney_dividend_financing_recorder_all diff --git a/zvt/recorders/eastmoney/dividend_financing/dividend_detail_recorder.py b/src/zvt/recorders/eastmoney/dividend_financing/eastmoney_dividend_detail_recorder.py similarity index 68% rename from zvt/recorders/eastmoney/dividend_financing/dividend_detail_recorder.py rename to src/zvt/recorders/eastmoney/dividend_financing/eastmoney_dividend_detail_recorder.py index f8bd3fed..3ede9dc3 100644 --- a/zvt/recorders/eastmoney/dividend_financing/dividend_detail_recorder.py +++ b/src/zvt/recorders/eastmoney/dividend_financing/eastmoney_dividend_detail_recorder.py @@ -1,18 +1,18 @@ # -*- coding: utf-8 -*- -from zvt.utils.time_utils import to_pd_timestamp from zvt.domain import DividendDetail from zvt.recorders.eastmoney.common import EastmoneyPageabeDataRecorder +from zvt.utils.time_utils import to_pd_timestamp class DividendDetailRecorder(EastmoneyPageabeDataRecorder): data_schema = DividendDetail - url = 'https://emh5.eastmoney.com/api/FenHongRongZi/GetFenHongSongZhuanList' + url = "https://emh5.eastmoney.com/api/FenHongRongZi/GetFenHongSongZhuanList" page_url = url - path_fields = ['FenHongSongZhuanList'] + path_fields = ["FenHongSongZhuanList"] def get_original_time_field(self): - return 'GongGaoRiQi' + return "GongGaoRiQi" def get_data_map(self): return { @@ -23,14 +23,16 @@ def get_data_map(self): # 除权除息日 "dividend_date": ("ChuQuanChuXiRi", to_pd_timestamp), # 方案 - "dividend": ("FengHongFangAn", str) + "dividend": ("FengHongFangAn", str), } -__all__ = ['DividendDetailRecorder'] - -if __name__ == '__main__': +if __name__ == "__main__": # init_log('dividend_detail.log') - recorder = DividendDetailRecorder(codes=['601318']) + recorder = DividendDetailRecorder(codes=["601318"]) recorder.run() + + +# the __all__ is generated +__all__ = ["DividendDetailRecorder"] diff --git a/zvt/recorders/eastmoney/dividend_financing/dividend_financing_recorder.py 
b/src/zvt/recorders/eastmoney/dividend_financing/eastmoney_dividend_financing_recorder.py similarity index 58% rename from zvt/recorders/eastmoney/dividend_financing/dividend_financing_recorder.py rename to src/zvt/recorders/eastmoney/dividend_financing/eastmoney_dividend_financing_recorder.py index 542be5f9..f46b8d11 100644 --- a/zvt/recorders/eastmoney/dividend_financing/dividend_financing_recorder.py +++ b/src/zvt/recorders/eastmoney/dividend_financing/eastmoney_dividend_financing_recorder.py @@ -7,12 +7,12 @@ class DividendFinancingRecorder(EastmoneyPageabeDataRecorder): data_schema = DividendFinancing - url = 'https://emh5.eastmoney.com/api/FenHongRongZi/GetLiNianFenHongRongZiList' + url = "https://emh5.eastmoney.com/api/FenHongRongZi/GetLiNianFenHongRongZiList" page_url = url - path_fields = ['LiNianFenHongRongZiList'] + path_fields = ["LiNianFenHongRongZiList"] def get_original_time_field(self): - return 'ShiJian' + return "ShiJian" def get_data_map(self): return { @@ -23,7 +23,7 @@ def get_data_map(self): # 增发 "spo_issues": ("ZengFa", second_item_to_float), # 配股 - "rights_issues": ("PeiFa", second_item_to_float) + "rights_issues": ("PeiFa", second_item_to_float), } def on_finish(self): @@ -32,12 +32,13 @@ def on_finish(self): for item in self.entities: code_security[item.code] = item - need_fill_items = DividendFinancing.query_data(provider=self.provider, codes=list(code_security.keys()), - return_type='domain', - session=self.session, - filters=[ - DividendFinancing.ipo_raising_fund.is_(None), - DividendFinancing.ipo_issues != 0]) + need_fill_items = DividendFinancing.query_data( + provider=self.provider, + codes=list(code_security.keys()), + return_type="domain", + session=self.session, + filters=[DividendFinancing.ipo_raising_fund.is_(None), DividendFinancing.ipo_issues != 0], + ) for need_fill_item in need_fill_items: if need_fill_item: @@ -49,10 +50,12 @@ def on_finish(self): super().on_finish() -__all__ = ['DividendFinancingRecorder'] - -if __name__ 
== '__main__': +if __name__ == "__main__": # init_log('dividend_financing.log') - recorder = DividendFinancingRecorder(codes=['000999']) + recorder = DividendFinancingRecorder(codes=["000999"]) recorder.run() + + +# the __all__ is generated +__all__ = ["DividendFinancingRecorder"] diff --git a/src/zvt/recorders/eastmoney/dividend_financing/eastmoney_rights_issue_detail_recorder.py b/src/zvt/recorders/eastmoney/dividend_financing/eastmoney_rights_issue_detail_recorder.py new file mode 100644 index 00000000..d5b35d3c --- /dev/null +++ b/src/zvt/recorders/eastmoney/dividend_financing/eastmoney_rights_issue_detail_recorder.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +from zvt.consts import SAMPLE_STOCK_CODES +from zvt.domain import RightsIssueDetail, DividendFinancing +from zvt.recorders.eastmoney.common import EastmoneyPageabeDataRecorder +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import now_pd_timestamp +from zvt.utils.utils import to_float + + +class RightsIssueDetailRecorder(EastmoneyPageabeDataRecorder): + data_schema = RightsIssueDetail + + url = "https://emh5.eastmoney.com/api/FenHongRongZi/GetPeiGuMingXiList" + page_url = url + path_fields = ["PeiGuMingXiList"] + + def get_original_time_field(self): + return "PeiGuGongGaoRi" + + def get_data_map(self): + return { + "rights_issues": ("ShiJiPeiGu", to_float), + "rights_issue_price": ("PeiGuJiaGe", to_float), + "rights_raising_fund": ("ShiJiMuJi", to_float), + } + + def on_finish(self): + last_year = str(now_pd_timestamp().year) + codes = [item.code for item in self.entities] + need_filleds = DividendFinancing.query_data( + provider=self.provider, + codes=codes, + return_type="domain", + session=self.session, + filters=[DividendFinancing.rights_raising_fund.is_(None)], + end_timestamp=last_year, + ) + + for item in need_filleds: + df = RightsIssueDetail.query_data( + provider=self.provider, + entity_id=item.entity_id, + columns=[RightsIssueDetail.timestamp, 
RightsIssueDetail.rights_raising_fund], + start_timestamp=item.timestamp, + end_timestamp="{}-12-31".format(item.timestamp.year), + ) + if pd_is_not_null(df): + item.rights_raising_fund = df["rights_raising_fund"].sum() + self.session.commit() + + super().on_finish() + + +if __name__ == "__main__": + # init_log('rights_issue.log') + + recorder = RightsIssueDetailRecorder(codes=SAMPLE_STOCK_CODES) + recorder.run() + + +# the __all__ is generated +__all__ = ["RightsIssueDetailRecorder"] diff --git a/src/zvt/recorders/eastmoney/dividend_financing/eastmoney_spo_detail_recorder.py b/src/zvt/recorders/eastmoney/dividend_financing/eastmoney_spo_detail_recorder.py new file mode 100644 index 00000000..939c80b8 --- /dev/null +++ b/src/zvt/recorders/eastmoney/dividend_financing/eastmoney_spo_detail_recorder.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +from zvt.domain import SpoDetail, DividendFinancing +from zvt.recorders.eastmoney.common import EastmoneyPageabeDataRecorder +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import now_pd_timestamp +from zvt.utils.utils import to_float + + +class SPODetailRecorder(EastmoneyPageabeDataRecorder): + data_schema = SpoDetail + + url = "https://emh5.eastmoney.com/api/FenHongRongZi/GetZengFaMingXiList" + page_url = url + path_fields = ["ZengFaMingXiList"] + + def get_original_time_field(self): + return "ZengFaShiJian" + + def get_data_map(self): + return { + "spo_issues": ("ShiJiZengFa", to_float), + "spo_price": ("ZengFaJiaGe", to_float), + "spo_raising_fund": ("ShiJiMuJi", to_float), + } + + def on_finish(self): + last_year = str(now_pd_timestamp().year) + codes = [item.code for item in self.entities] + need_filleds = DividendFinancing.query_data( + provider=self.provider, + codes=codes, + return_type="domain", + session=self.session, + filters=[DividendFinancing.spo_raising_fund.is_(None)], + end_timestamp=last_year, + ) + + for item in need_filleds: + df = SpoDetail.query_data( + provider=self.provider, + 
entity_id=item.entity_id, + columns=[SpoDetail.timestamp, SpoDetail.spo_raising_fund], + start_timestamp=item.timestamp, + end_timestamp="{}-12-31".format(item.timestamp.year), + ) + if pd_is_not_null(df): + item.spo_raising_fund = df["spo_raising_fund"].sum() + self.session.commit() + super().on_finish() + + +if __name__ == "__main__": + # init_log('spo_detail.log') + + recorder = SPODetailRecorder(codes=["000999"]) + recorder.run() + + +# the __all__ is generated +__all__ = ["SPODetailRecorder"] diff --git a/src/zvt/recorders/eastmoney/finance/__init__.py b/src/zvt/recorders/eastmoney/finance/__init__.py new file mode 100644 index 00000000..6b9c423c --- /dev/null +++ b/src/zvt/recorders/eastmoney/finance/__init__.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*-# + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule eastmoney_finance_factor_recorder +from .eastmoney_finance_factor_recorder import * +from .eastmoney_finance_factor_recorder import __all__ as _eastmoney_finance_factor_recorder_all + +__all__ += _eastmoney_finance_factor_recorder_all + +# import all from submodule eastmoney_cash_flow_recorder +from .eastmoney_cash_flow_recorder import * +from .eastmoney_cash_flow_recorder import __all__ as _eastmoney_cash_flow_recorder_all + +__all__ += _eastmoney_cash_flow_recorder_all + +# import all from submodule eastmoney_income_statement_recorder +from .eastmoney_income_statement_recorder import * +from .eastmoney_income_statement_recorder import __all__ as _eastmoney_income_statement_recorder_all + +__all__ += _eastmoney_income_statement_recorder_all + +# import all from submodule base_china_stock_finance_recorder +from .base_china_stock_finance_recorder import * +from .base_china_stock_finance_recorder import __all__ as _base_china_stock_finance_recorder_all + +__all__ += _base_china_stock_finance_recorder_all + 
+# import all from submodule eastmoney_balance_sheet_recorder +from .eastmoney_balance_sheet_recorder import * +from .eastmoney_balance_sheet_recorder import __all__ as _eastmoney_balance_sheet_recorder_all + +__all__ += _eastmoney_balance_sheet_recorder_all diff --git a/src/zvt/recorders/eastmoney/finance/base_china_stock_finance_recorder.py b/src/zvt/recorders/eastmoney/finance/base_china_stock_finance_recorder.py new file mode 100644 index 00000000..b9559ece --- /dev/null +++ b/src/zvt/recorders/eastmoney/finance/base_china_stock_finance_recorder.py @@ -0,0 +1,229 @@ +# -*- coding: utf-8 -*- + +from jqdatapy.api import get_fundamentals, get_query_count + +from zvt.api.utils import to_report_period_type +from zvt.contract.api import get_data +from zvt.domain import FinanceFactor, ReportPeriod +from zvt.recorders.eastmoney.common import ( + company_type_flag, + get_fc, + EastmoneyTimestampsDataRecorder, + call_eastmoney_api, + get_from_path_fields, +) +from zvt.recorders.joinquant.common import to_jq_entity_id +from zvt.utils.pd_utils import index_df +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import to_time_str, to_pd_timestamp + + +def to_jq_report_period(timestamp): + the_date = to_pd_timestamp(timestamp) + report_period = to_report_period_type(timestamp) + if report_period == ReportPeriod.year.value: + return "{}".format(the_date.year) + if report_period == ReportPeriod.season1.value: + return "{}q1".format(the_date.year) + if report_period == ReportPeriod.half_year.value: + return "{}q2".format(the_date.year) + if report_period == ReportPeriod.season3.value: + return "{}q3".format(the_date.year) + + assert False + + +class BaseChinaStockFinanceRecorder(EastmoneyTimestampsDataRecorder): + finance_report_type = None + data_type = 1 + + timestamps_fetching_url = "https://emh5.eastmoney.com/api/CaiWuFenXi/GetCompanyReportDateList" + timestamp_list_path_fields = ["CompanyReportDateList"] + timestamp_path_fields = ["ReportDate"] + + 
def __init__( + self, + exchanges=None, + entity_id=None, + entity_ids=None, + code=None, + codes=None, + day_data=False, + force_update=False, + sleeping_time=5, + real_time=False, + fix_duplicate_way="add", + start_timestamp=None, + end_timestamp=None, + ) -> None: + super().__init__( + force_update, + sleeping_time, + exchanges, + entity_id, + entity_ids, + code, + codes, + day_data, + real_time=real_time, + fix_duplicate_way=fix_duplicate_way, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + ) + + try: + self.logger.info(f"joinquant query count:{get_query_count()}") + self.fetch_jq_timestamp = True + except Exception as e: + self.fetch_jq_timestamp = False + self.logger.warning( + f"joinquant account not ok,the timestamp(publish date) for finance would be not correct. {e}" + ) + + def init_timestamps(self, entity): + param = {"color": "w", "fc": get_fc(entity), "DataType": self.data_type} + + if self.finance_report_type == "LiRunBiaoList" or self.finance_report_type == "XianJinLiuLiangBiaoList": + param["ReportType"] = 1 + + timestamp_json_list = call_eastmoney_api( + url=self.timestamps_fetching_url, path_fields=self.timestamp_list_path_fields, param=param + ) + + if self.timestamp_path_fields: + timestamps = [get_from_path_fields(data, self.timestamp_path_fields) for data in timestamp_json_list] + + return [to_pd_timestamp(t) for t in timestamps] + + def generate_request_param(self, security_item, start, end, size, timestamps): + if len(timestamps) <= 10: + param = { + "color": "w", + "fc": get_fc(security_item), + "corpType": company_type_flag(security_item), + # 0 means get all types + "reportDateType": 0, + "endDate": "", + "latestCount": size, + } + else: + param = { + "color": "w", + "fc": get_fc(security_item), + "corpType": company_type_flag(security_item), + # 0 means get all types + "reportDateType": 0, + "endDate": to_time_str(timestamps[10]), + "latestCount": 10, + } + + if self.finance_report_type == "LiRunBiaoList" or 
self.finance_report_type == "XianJinLiuLiangBiaoList": + param["reportType"] = 1 + + return param + + def generate_path_fields(self, security_item): + comp_type = company_type_flag(security_item) + + if comp_type == "3": + return ["{}_YinHang".format(self.finance_report_type)] + elif comp_type == "2": + return ["{}_BaoXian".format(self.finance_report_type)] + elif comp_type == "1": + return ["{}_QuanShang".format(self.finance_report_type)] + elif comp_type == "4": + return ["{}_QiYe".format(self.finance_report_type)] + + def record(self, entity, start, end, size, timestamps): + # different with the default timestamps handling + param = self.generate_request_param(entity, start, end, size, timestamps) + self.logger.info("request param:{}".format(param)) + + return self.api_wrapper.request( + url=self.url, param=param, method=self.request_method, path_fields=self.generate_path_fields(entity) + ) + + def get_original_time_field(self): + return "ReportDate" + + def fill_timestamp_with_jq(self, security_item, the_data): + # get report published date from jq + try: + df = get_fundamentals( + table="indicator", + code=to_jq_entity_id(security_item), + columns="pubDate", + date=to_jq_report_period(the_data.report_date), + count=None, + parse_dates=["pubDate"], + ) + if pd_is_not_null(df): + the_data.timestamp = to_pd_timestamp(df["pubDate"][0]) + self.logger.info( + "jq fill {} {} timestamp:{} for report_date:{}".format( + self.data_schema, security_item.id, the_data.timestamp, the_data.report_date + ) + ) + self.session.commit() + except Exception as e: + self.logger.error(f"Failed to fill timestamp(publish date) for finance data from joinquant {e}") + + def on_finish_entity(self, entity): + super().on_finish_entity(entity) + + if not self.fetch_jq_timestamp: + return + + # fill the timestamp for report published date + the_data_list = get_data( + data_schema=self.data_schema, + provider=self.provider, + entity_id=entity.id, + order=self.data_schema.timestamp.asc(), + 
return_type="domain", + session=self.session, + filters=[ + self.data_schema.timestamp == self.data_schema.report_date, + self.data_schema.timestamp >= to_pd_timestamp("2005-01-01"), + ], + ) + if the_data_list: + if self.data_schema == FinanceFactor: + for the_data in the_data_list: + self.fill_timestamp_with_jq(entity, the_data) + else: + df = FinanceFactor.query_data( + entity_id=entity.id, + columns=[FinanceFactor.timestamp, FinanceFactor.report_date, FinanceFactor.id], + filters=[ + FinanceFactor.timestamp != FinanceFactor.report_date, + FinanceFactor.timestamp >= to_pd_timestamp("2005-01-01"), + FinanceFactor.report_date >= the_data_list[0].report_date, + FinanceFactor.report_date <= the_data_list[-1].report_date, + ], + ) + + if pd_is_not_null(df): + index_df(df, index="report_date", time_field="report_date") + + for the_data in the_data_list: + if (df is not None) and (not df.empty) and the_data.report_date in df.index: + the_data.timestamp = df.at[the_data.report_date, "timestamp"] + self.logger.info( + "db fill {} {} timestamp:{} for report_date:{}".format( + self.data_schema, entity.id, the_data.timestamp, the_data.report_date + ) + ) + self.session.commit() + else: + # self.logger.info( + # 'waiting jq fill {} {} timestamp:{} for report_date:{}'.format(self.data_schema, + # security_item.id, + # the_data.timestamp, + # the_data.report_date)) + + self.fill_timestamp_with_jq(entity, the_data) + + +# the __all__ is generated +__all__ = ["to_jq_report_period", "BaseChinaStockFinanceRecorder"] diff --git a/zvt/recorders/eastmoney/finance/china_stock_balance_sheet_recorder.py b/src/zvt/recorders/eastmoney/finance/eastmoney_balance_sheet_recorder.py similarity index 97% rename from zvt/recorders/eastmoney/finance/china_stock_balance_sheet_recorder.py rename to src/zvt/recorders/eastmoney/finance/eastmoney_balance_sheet_recorder.py index 9bd75c8e..a58e039c 100644 --- a/zvt/recorders/eastmoney/finance/china_stock_balance_sheet_recorder.py +++ 
b/src/zvt/recorders/eastmoney/finance/eastmoney_balance_sheet_recorder.py @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- -from zvt.utils.time_utils import to_pd_timestamp -from zvt.utils.utils import add_func_to_value, first_item_to_float -from zvt.api.quote import to_report_period_type +from zvt.api.utils import to_report_period_type from zvt.domain import BalanceSheet from zvt.recorders.eastmoney.finance.base_china_stock_finance_recorder import BaseChinaStockFinanceRecorder +from zvt.utils.time_utils import to_pd_timestamp +from zvt.utils.utils import add_func_to_value, first_item_to_float balance_sheet_map = { # 流动资产 @@ -114,7 +114,6 @@ "total_equity": "Sumshequity", # 负债和股东权益合计 "total_liabilities_and_equity": "Sumliabshequity", - # 银行相关 # 资产 # 现金及存放中央银行款项 @@ -209,7 +208,6 @@ # 股东权益合计 # # 负债及股东权益总计 - # 券商相关 # 资产 # @@ -312,7 +310,6 @@ # 股东权益合计 # # 负债和股东权益总计 - # 保险相关 # 应收保费 "fi_premiums_receivable": "Premiumrec", @@ -426,7 +423,6 @@ # 股东权益合计 # # 负债和股东权益总计 - } add_func_to_value(balance_sheet_map, first_item_to_float) @@ -437,17 +433,19 @@ class ChinaStockBalanceSheetRecorder(BaseChinaStockFinanceRecorder): data_schema = BalanceSheet - url = 'https://emh5.eastmoney.com/api/CaiWuFenXi/GetZiChanFuZhaiBiaoList' - finance_report_type = 'ZiChanFuZhaiBiaoList' + url = "https://emh5.eastmoney.com/api/CaiWuFenXi/GetZiChanFuZhaiBiaoList" + finance_report_type = "ZiChanFuZhaiBiaoList" data_type = 3 def get_data_map(self): return balance_sheet_map -__all__ = ['ChinaStockBalanceSheetRecorder'] - -if __name__ == '__main__': +if __name__ == "__main__": # init_log('blance_sheet.log') - recorder = ChinaStockBalanceSheetRecorder(codes=['002572']) + recorder = ChinaStockBalanceSheetRecorder(codes=["002572"]) recorder.run() + + +# the __all__ is generated +__all__ = ["ChinaStockBalanceSheetRecorder"] diff --git a/zvt/recorders/eastmoney/finance/china_stock_cash_flow_recorder.py b/src/zvt/recorders/eastmoney/finance/eastmoney_cash_flow_recorder.py similarity index 96% rename from 
zvt/recorders/eastmoney/finance/china_stock_cash_flow_recorder.py rename to src/zvt/recorders/eastmoney/finance/eastmoney_cash_flow_recorder.py index 7c36a83a..1598818d 100644 --- a/zvt/recorders/eastmoney/finance/china_stock_cash_flow_recorder.py +++ b/src/zvt/recorders/eastmoney/finance/eastmoney_cash_flow_recorder.py @@ -1,25 +1,21 @@ # -*- coding: utf-8 -*- -from zvt.utils.time_utils import to_pd_timestamp -from zvt.utils.utils import add_func_to_value, first_item_to_float -from zvt.api.quote import to_report_period_type +from zvt.api.utils import to_report_period_type from zvt.domain import CashFlowStatement from zvt.recorders.eastmoney.finance.base_china_stock_finance_recorder import BaseChinaStockFinanceRecorder +from zvt.utils.time_utils import to_pd_timestamp +from zvt.utils.utils import add_func_to_value, first_item_to_float cash_flow_map = { # 经营活动产生的现金流量 # # 销售商品、提供劳务收到的现金 "cash_from_selling": "Salegoodsservicerec", - # 收到的税费返还 "tax_refund": "Taxreturnrec", - # 收到其他与经营活动有关的现金 "cash_from_other_op": "Otheroperaterec", - # 经营活动现金流入小计 "total_op_cash_inflows": "Sumoperateflowin", - # 购买商品、接受劳务支付的现金 "cash_to_goods_services": "Buygoodsservicepay", # 支付给职工以及为职工支付的现金 @@ -30,12 +26,9 @@ "cash_to_other_related_op": "Otheroperatepay", # 经营活动现金流出小计 "total_op_cash_outflows": "Sumoperateflowout", - # 经营活动产生的现金流量净额 "net_op_cash_flows": "Netoperatecashflow", - # 投资活动产生的现金流量 - # 收回投资收到的现金 "cash_from_disposal_of_investments": "Disposalinvrec", # 取得投资收益收到的现金 @@ -44,61 +37,46 @@ "cash_from_disposal_fixed_intangible_assets": "Dispfilassetrec", # 处置子公司及其他营业单位收到的现金净额 "cash_from_disposal_subsidiaries": "Dispsubsidiaryrec", - # 收到其他与投资活动有关的现金 "cash_from_other_investing": "Otherinvrec", - # 投资活动现金流入小计 "total_investing_cash_inflows": "Suminvflowin", - # 购建固定资产、无形资产和其他长期资产支付的现金 "cash_to_acquire_fixed_intangible_assets": "Buyfilassetpay", # 投资支付的现金 "cash_to_investments": "Invpay", - # 取得子公司及其他营业单位支付的现金净额 "cash_to_acquire_subsidiaries": "Getsubsidiarypay", - # 支付其他与投资活动有关的现金 
"cash_to_other_investing": "Otherinvpay", - # 投资活动现金流出小计 "total_investing_cash_outflows": "Suminvflowout", - # 投资活动产生的现金流量净额 "net_investing_cash_flows": "Netinvcashflow", - # 筹资活动产生的现金流量 # # 吸收投资收到的现金 "cash_from_accepting_investment": "Acceptinvrec", # 子公司吸收少数股东投资收到的现金 "cash_from_subsidiaries_accepting_minority_interest": "Subsidiaryaccept", - # 取得借款收到的现金 "cash_from_borrowings": "Loanrec", # 发行债券收到的现金 "cash_from_issuing_bonds": "Issuebondrec", # 收到其他与筹资活动有关的现金 "cash_from_other_financing": "Otherfinarec", - # 筹资活动现金流入小计 "total_financing_cash_inflows": "Sumfinaflowin", - # 偿还债务支付的现金 "cash_to_repay_borrowings": "Repaydebtpay", - # 分配股利、利润或偿付利息支付的现金 "cash_to_pay_interest_dividend": "Diviprofitorintpay", - # 子公司支付给少数股东的股利、利润 "cash_to_pay_subsidiaries_minority_interest": "Subsidiarypay", - # 支付其他与筹资活动有关的现金 "cash_to_other_financing": "Otherfinapay", # 筹资活动现金流出小计 "total_financing_cash_outflows": "Sumfinaflowout", - # 筹资活动产生的现金流量净额 "net_financing_cash_flows": "Netfinacashflow", # 汇率变动对现金及现金等价物的影响 @@ -109,7 +87,6 @@ "cash_at_beginning": "Cashequibeginning", # 期末现金及现金等价物余额 "cash": "Cashequiending", - # 银行相关 # 客户存款和同业及其他金融机构存放款项净增加额 "fi_deposit_increase": "Nideposit", @@ -149,7 +126,6 @@ "fi_account_receivable_increase": "Niaccountrec", # 偿付债券利息支付的现金 "fi_cash_to_pay_interest": "Bondintpay", - # 保险相关 # 收到原保险合同保费取得的现金 "fi_cash_from_premium_of_original": "Premiumrec", @@ -177,7 +153,6 @@ "fi_cash_to_disposal_subsidiaries": "Dispsubsidiarypay", # 支付卖出回购金融资产款现金净额 "fi_cash_to_sell_repurchase": "Netsellbuybackfassetpay", - # 券商相关 # 拆入资金净增加额 "fi_borrowing_increase": "Niborrowfund", @@ -201,17 +176,19 @@ class ChinaStockCashFlowRecorder(BaseChinaStockFinanceRecorder): data_schema = CashFlowStatement - url = 'https://emh5.eastmoney.com/api/CaiWuFenXi/GetXianJinLiuLiangBiaoList' - finance_report_type = 'XianJinLiuLiangBiaoList' + url = "https://emh5.eastmoney.com/api/CaiWuFenXi/GetXianJinLiuLiangBiaoList" + finance_report_type = "XianJinLiuLiangBiaoList" data_type = 4 def 
get_data_map(self): return cash_flow_map -__all__ = ['ChinaStockCashFlowRecorder'] - -if __name__ == '__main__': +if __name__ == "__main__": # init_log('cash_flow.log') - recorder = ChinaStockCashFlowRecorder(codes=['002572']) + recorder = ChinaStockCashFlowRecorder(codes=["002572"]) recorder.run() + + +# the __all__ is generated +__all__ = ["ChinaStockCashFlowRecorder"] diff --git a/zvt/recorders/eastmoney/finance/china_stock_finance_factor_recorder.py b/src/zvt/recorders/eastmoney/finance/eastmoney_finance_factor_recorder.py similarity index 93% rename from zvt/recorders/eastmoney/finance/china_stock_finance_factor_recorder.py rename to src/zvt/recorders/eastmoney/finance/eastmoney_finance_factor_recorder.py index 6cd31780..d920d78a 100644 --- a/zvt/recorders/eastmoney/finance/china_stock_finance_factor_recorder.py +++ b/src/zvt/recorders/eastmoney/finance/eastmoney_finance_factor_recorder.py @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- -from zvt.utils.time_utils import to_pd_timestamp -from zvt.utils.utils import add_func_to_value, to_float -from zvt.api.quote import to_report_period_type +from zvt.api.utils import to_report_period_type from zvt.domain import FinanceFactor from zvt.recorders.eastmoney.finance.base_china_stock_finance_recorder import BaseChinaStockFinanceRecorder +from zvt.utils.time_utils import to_pd_timestamp +from zvt.utils.utils import add_func_to_value, to_float finance_factor_map = { # 基本每股收益(元) @@ -92,7 +92,6 @@ "inventory_turnover": "Inventoryrate", # 应收账款周转率(次) "receivables_turnover": "Accountsreceiveablerate", - # 专项指标(银行) # # 存款总额 @@ -143,8 +142,8 @@ class ChinaStockFinanceFactorRecorder(BaseChinaStockFinanceRecorder): - url = 'https://emh5.eastmoney.com/api/CaiWuFenXi/GetZhuYaoZhiBiaoList' - finance_report_type = 'ZhuYaoZhiBiaoList' + url = "https://emh5.eastmoney.com/api/CaiWuFenXi/GetZhuYaoZhiBiaoList" + finance_report_type = "ZhuYaoZhiBiaoList" data_schema = FinanceFactor data_type = 1 @@ -153,9 +152,11 @@ def get_data_map(self): 
return finance_factor_map -__all__ = ['ChinaStockFinanceFactorRecorder'] - -if __name__ == '__main__': +if __name__ == "__main__": # init_log('finance_factor.log') - recorder = ChinaStockFinanceFactorRecorder(codes=['000001']) + recorder = ChinaStockFinanceFactorRecorder(codes=["000001"]) recorder.run() + + +# the __all__ is generated +__all__ = ["ChinaStockFinanceFactorRecorder"] diff --git a/zvt/recorders/eastmoney/finance/china_stock_income_statement_recorder.py b/src/zvt/recorders/eastmoney/finance/eastmoney_income_statement_recorder.py similarity index 95% rename from zvt/recorders/eastmoney/finance/china_stock_income_statement_recorder.py rename to src/zvt/recorders/eastmoney/finance/eastmoney_income_statement_recorder.py index 8a09839b..77aba6d2 100644 --- a/zvt/recorders/eastmoney/finance/china_stock_income_statement_recorder.py +++ b/src/zvt/recorders/eastmoney/finance/eastmoney_income_statement_recorder.py @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- -from zvt.utils.time_utils import to_pd_timestamp -from zvt.utils.utils import add_func_to_value, first_item_to_float -from zvt.api.quote import to_report_period_type +from zvt.api.utils import to_report_period_type from zvt.domain import IncomeStatement from zvt.recorders.eastmoney.finance.base_china_stock_finance_recorder import BaseChinaStockFinanceRecorder +from zvt.utils.time_utils import to_pd_timestamp +from zvt.utils.utils import add_func_to_value, first_item_to_float income_statement_map = { # 营业总收入 @@ -42,7 +42,6 @@ "non_operating_costs": "Nonoperateexp", # 其中: 非流动资产处置净损失 "loss_on_disposal_non_current_asset": "Nonlassetnetloss", - # 利润总额 "total_profits": "Sumprofit", # 减: 所得税费用 @@ -72,7 +71,6 @@ "total_comprehensive_income_as_parent": "Parentcincome", # 归属于少数股东的综合收益总额 "total_comprehensive_income_as_minority_interest": "Minoritycincome", - # 银行相关 # 利息净收入 "fi_net_interest_income": "Intnreve", @@ -94,7 +92,6 @@ "fi_other_income": "Otherreve", # 业务及管理费 "fi_operate_and_manage_expenses": "Operatemanageexp", - 
# 保险相关 # 已赚保费 "fi_net_income_from_premium": "Premiumearned", @@ -136,7 +133,6 @@ "fi_amortized_reinsurance_expenses": "Amortiseriexp", # 其他业务成本 "fi_other_op_expenses": "Otherexp", - # 券商相关 # 手续费及佣金净收入 # @@ -162,8 +158,8 @@ class ChinaStockIncomeStatementRecorder(BaseChinaStockFinanceRecorder): data_schema = IncomeStatement - url = 'https://emh5.eastmoney.com/api/CaiWuFenXi/GetLiRunBiaoList' - finance_report_type = 'LiRunBiaoList' + url = "https://emh5.eastmoney.com/api/CaiWuFenXi/GetLiRunBiaoList" + finance_report_type = "LiRunBiaoList" data_type = 2 @@ -171,9 +167,11 @@ def get_data_map(self): return income_statement_map -__all__ = ['ChinaStockIncomeStatementRecorder'] - -if __name__ == '__main__': +if __name__ == "__main__": # init_log('income_statement.log') - recorder = ChinaStockIncomeStatementRecorder(codes=['002572']) + recorder = ChinaStockIncomeStatementRecorder(codes=["002572"]) recorder.run() + + +# the __all__ is generated +__all__ = ["ChinaStockIncomeStatementRecorder"] diff --git a/src/zvt/recorders/eastmoney/holder/__init__.py b/src/zvt/recorders/eastmoney/holder/__init__.py new file mode 100644 index 00000000..334b2712 --- /dev/null +++ b/src/zvt/recorders/eastmoney/holder/__init__.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*-# + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule eastmoney_top_ten_holder_recorder +from .eastmoney_top_ten_holder_recorder import * +from .eastmoney_top_ten_holder_recorder import __all__ as _eastmoney_top_ten_holder_recorder_all + +__all__ += _eastmoney_top_ten_holder_recorder_all + +# import all from submodule eastmoney_top_ten_tradable_holder_recorder +from .eastmoney_top_ten_tradable_holder_recorder import * +from .eastmoney_top_ten_tradable_holder_recorder import __all__ as _eastmoney_top_ten_tradable_holder_recorder_all + +__all__ += 
_eastmoney_top_ten_tradable_holder_recorder_all + +# import all from submodule eastmoney_stock_actor_recorder +from .eastmoney_stock_actor_recorder import * +from .eastmoney_stock_actor_recorder import __all__ as _eastmoney_stock_actor_recorder_all + +__all__ += _eastmoney_stock_actor_recorder_all diff --git a/src/zvt/recorders/eastmoney/holder/eastmoney_stock_actor_recorder.py b/src/zvt/recorders/eastmoney/holder/eastmoney_stock_actor_recorder.py new file mode 100644 index 00000000..506e03ae --- /dev/null +++ b/src/zvt/recorders/eastmoney/holder/eastmoney_stock_actor_recorder.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +import requests + +from zvt.api.utils import get_recent_report_date +from zvt.contract.recorder import Recorder +from zvt.domain.actor.actor_meta import ActorMeta +from zvt.utils.time_utils import to_pd_timestamp + + +class EastmoneyActorRecorder(Recorder): + name = "eastmoney_actor_recorder" + provider = "eastmoney" + data_schema = ActorMeta + + url = "https://datacenter.eastmoney.com/securities/api/data/v1/get?reportName=RPT_FREEHOLDERS_BASIC_INFO&columns=HOLDER_NAME,END_DATE,HOLDER_NEW,HOLDER_NUM,HOLDER_CODE"eColumns=&filter=(END_DATE='{}')&pageNumber={}&pageSize={}&sortTypes=-1,-1&sortColumns=HOLDER_NUM,HOLDER_NAME&source=SECURITIES&client=SW" + + start = "2016-03-31" + + def get_data(self, end_date, pn, ps): + resp = requests.get(url=self.url.format(end_date, pn, ps)) + return resp.json() + + def run(self): + current_date = get_recent_report_date() + pn = 1 + ps = 2000 + + while to_pd_timestamp(current_date) >= to_pd_timestamp(self.start): + if not self.state: + current_date = get_recent_report_date() + result = self.get_data(end_date=current_date, pn=pn, ps=ps) + print(result) + self.state = {"end_date": current_date, "pages": result["result"]["pages"], "pn": pn, "ps": ps} + self.persist_state("stock_sz_000001", self.state) + else: + if self.state["pn"] >= self.state["pages"]: + current_date = 
get_recent_report_date(the_date=self.state["end_date"], step=1) + pn = pn + ps = ps + else: + pn = self.state["pn"] + 1 + ps = self.state["ps"] + current_date = self.state["end_date"] + + result = self.get_data(end_date=current_date, pn=pn, ps=ps) + print(result) + self.state = {"end_date": current_date, "pages": result["result"]["pages"], "pn": pn, "ps": ps} + self.persist_state("stock_sz_000001", self.state) + + +if __name__ == "__main__": + EastmoneyActorRecorder().run() + + +# the __all__ is generated +__all__ = ["EastmoneyActorRecorder"] diff --git a/zvt/recorders/eastmoney/holder/top_ten_holder_recorder.py b/src/zvt/recorders/eastmoney/holder/eastmoney_top_ten_holder_recorder.py similarity index 69% rename from zvt/recorders/eastmoney/holder/top_ten_holder_recorder.py rename to src/zvt/recorders/eastmoney/holder/eastmoney_top_ten_holder_recorder.py index c98f4166..b580d1aa 100644 --- a/zvt/recorders/eastmoney/holder/top_ten_holder_recorder.py +++ b/src/zvt/recorders/eastmoney/holder/eastmoney_top_ten_holder_recorder.py @@ -1,21 +1,21 @@ # -*- coding: utf-8 -*- -from zvt.utils.time_utils import to_time_str, to_pd_timestamp -from zvt.utils.utils import to_float -from zvt.api.quote import to_report_period_type +from zvt.api.utils import to_report_period_type from zvt.domain.misc.holder import TopTenHolder from zvt.recorders.eastmoney.common import EastmoneyTimestampsDataRecorder, get_fc +from zvt.utils.time_utils import to_time_str, to_pd_timestamp +from zvt.utils.utils import to_float class TopTenHolderRecorder(EastmoneyTimestampsDataRecorder): - provider = 'eastmoney' + provider = "eastmoney" data_schema = TopTenHolder - url = 'https://emh5.eastmoney.com/api/GuBenGuDong/GetShiDaGuDong' - path_fields = ['ShiDaGuDongList'] + url = "https://emh5.eastmoney.com/api/GuBenGuDong/GetShiDaGuDong" + path_fields = ["ShiDaGuDongList"] - timestamps_fetching_url = 'https://emh5.eastmoney.com/api/GuBenGuDong/GetFirstRequest2Data' - timestamp_list_path_fields = ['SDGDBGQ', 
'ShiDaGuDongBaoGaoQiList'] - timestamp_path_fields = ['BaoGaoQi'] + timestamps_fetching_url = "https://emh5.eastmoney.com/api/GuBenGuDong/GetFirstRequest2Data" + timestamp_list_path_fields = ["SDGDBGQ", "ShiDaGuDongBaoGaoQiList"] + timestamp_path_fields = ["BaoGaoQi"] def get_data_map(self): return { @@ -36,10 +36,7 @@ def get_data_map(self): } def generate_request_param(self, security_item, start, end, size, timestamp): - return {"color": "w", - "fc": get_fc(security_item), - "BaoGaoQi": to_time_str(timestamp) - } + return {"color": "w", "fc": get_fc(security_item), "BaoGaoQi": to_time_str(timestamp)} def generate_domain_id(self, entity, original_data): the_name = original_data.get("GuDongMingCheng") @@ -48,9 +45,11 @@ def generate_domain_id(self, entity, original_data): return the_id -__all__ = ['TopTenHolder'] - -if __name__ == '__main__': +if __name__ == "__main__": # init_log('top_ten_holder.log') - TopTenHolderRecorder(codes=['002572']).run() + TopTenHolderRecorder(codes=["002572"]).run() + + +# the __all__ is generated +__all__ = ["TopTenHolderRecorder"] diff --git a/src/zvt/recorders/eastmoney/holder/eastmoney_top_ten_tradable_holder_recorder.py b/src/zvt/recorders/eastmoney/holder/eastmoney_top_ten_tradable_holder_recorder.py new file mode 100644 index 00000000..9033f5b6 --- /dev/null +++ b/src/zvt/recorders/eastmoney/holder/eastmoney_top_ten_tradable_holder_recorder.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +from zvt.domain import TopTenTradableHolder +from zvt.recorders.eastmoney.holder.eastmoney_top_ten_holder_recorder import TopTenHolderRecorder + + +class TopTenTradableHolderRecorder(TopTenHolderRecorder): + provider = "eastmoney" + data_schema = TopTenTradableHolder + + url = "https://emh5.eastmoney.com/api/GuBenGuDong/GetShiDaLiuTongGuDong" + path_fields = ["ShiDaLiuTongGuDongList"] + timestamps_fetching_url = "https://emh5.eastmoney.com/api/GuBenGuDong/GetFirstRequest2Data" + timestamp_list_path_fields = ["SDLTGDBGQ", 
"ShiDaLiuTongGuDongBaoGaoQiList"] + timestamp_path_fields = ["BaoGaoQi"] + + +if __name__ == "__main__": + # init_log('top_ten_tradable_holder.log') + + TopTenTradableHolderRecorder(codes=["002572"]).run() + + +# the __all__ is generated +__all__ = ["TopTenTradableHolderRecorder"] diff --git a/src/zvt/recorders/eastmoney/meta/__init__.py b/src/zvt/recorders/eastmoney/meta/__init__.py new file mode 100644 index 00000000..b2cfa5c6 --- /dev/null +++ b/src/zvt/recorders/eastmoney/meta/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*-# + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule eastmoney_block_meta_recorder +from .eastmoney_block_meta_recorder import * +from .eastmoney_block_meta_recorder import __all__ as _eastmoney_block_meta_recorder_all + +__all__ += _eastmoney_block_meta_recorder_all + +# import all from submodule eastmoney_stock_meta_recorder +from .eastmoney_stock_meta_recorder import * +from .eastmoney_stock_meta_recorder import __all__ as _eastmoney_stock_meta_recorder_all + +__all__ += _eastmoney_stock_meta_recorder_all diff --git a/zvt/recorders/eastmoney/meta/china_stock_category_recorder.py b/src/zvt/recorders/eastmoney/meta/eastmoney_block_meta_recorder.py similarity index 50% rename from zvt/recorders/eastmoney/meta/china_stock_category_recorder.py rename to src/zvt/recorders/eastmoney/meta/eastmoney_block_meta_recorder.py index 1bd2d50f..981e2832 100644 --- a/zvt/recorders/eastmoney/meta/china_stock_category_recorder.py +++ b/src/zvt/recorders/eastmoney/meta/eastmoney_block_meta_recorder.py @@ -2,107 +2,106 @@ import pandas as pd import requests +from zvt.api.utils import china_stock_code_to_id from zvt.contract.api import df_to_db from zvt.contract.recorder import Recorder, TimeSeriesDataRecorder +from zvt.domain import BlockStock, BlockCategory, Block +from zvt.recorders.consts import 
DEFAULT_HEADER from zvt.utils.time_utils import now_pd_timestamp from zvt.utils.utils import json_callback_param -from zvt.api.quote import china_stock_code_to_id -from zvt.domain import BlockStock, BlockCategory, Block -class EastmoneyChinaBlockRecorder(Recorder): - provider = 'eastmoney' +class EastmoneyBlockRecorder(Recorder): + provider = "eastmoney" data_schema = Block # 用于抓取行业/概念/地域列表 category_map_url = { - BlockCategory.industry: 'https://nufm.dfcfw.com/EM_Finance2014NumericApplication/JS.aspx?type=CT&cmd=C._BKHY&sty=DCRRBKCPAL&st=(ChangePercent)&sr=-1&p=1&ps=200&lvl=&cb=jsonp_F1A61014DE5E45B7A50068EA290BC918&token=4f1862fc3b5e77c150a2b985b12db0fd&_=08766', - BlockCategory.concept: 'https://nufm.dfcfw.com/EM_Finance2014NumericApplication/JS.aspx?type=CT&cmd=C._BKGN&sty=DCRRBKCPAL&st=(ChangePercent)&sr=-1&p=1&ps=300&lvl=&cb=jsonp_3071689CC1E6486A80027D69E8B33F26&token=4f1862fc3b5e77c150a2b985b12db0fd&_=08251', + BlockCategory.industry: "https://nufm.dfcfw.com/EM_Finance2014NumericApplication/JS.aspx?type=CT&cmd=C._BKHY&sty=DCRRBKCPAL&st=(ChangePercent)&sr=-1&p=1&ps=200&lvl=&cb=jsonp_F1A61014DE5E45B7A50068EA290BC918&token=4f1862fc3b5e77c150a2b985b12db0fd&_=08766", + BlockCategory.concept: "https://nufm.dfcfw.com/EM_Finance2014NumericApplication/JS.aspx?type=CT&cmd=C._BKGN&sty=DCRRBKCPAL&st=(ChangePercent)&sr=-1&p=1&ps=300&lvl=&cb=jsonp_3071689CC1E6486A80027D69E8B33F26&token=4f1862fc3b5e77c150a2b985b12db0fd&_=08251", # BlockCategory.area: 'https://nufm.dfcfw.com/EM_Finance2014NumericApplication/JS.aspx?type=CT&cmd=C._BKDY&sty=DCRRBKCPAL&st=(ChangePercent)&sr=-1&p=1&ps=200&lvl=&cb=jsonp_A597D4867B3D4659A203AADE5B3B3AD5&token=4f1862fc3b5e77c150a2b985b12db0fd&_=02443' } def run(self): for category, url in self.category_map_url.items(): - resp = requests.get(url) + resp = requests.get(url, headers=DEFAULT_HEADER) results = json_callback_param(resp.text) the_list = [] for result in results: - items = result.split(',') + items = result.split(",") code = items[1] name 
= items[2] - entity_id = f'block_cn_{code}' - the_list.append({ - 'id': entity_id, - 'entity_id': entity_id, - 'entity_type': 'block', - 'exchange': 'cn', - 'code': code, - 'name': name, - 'category': category.value - }) + entity_id = f"block_cn_{code}" + the_list.append( + { + "id": entity_id, + "entity_id": entity_id, + "entity_type": "block", + "exchange": "cn", + "code": code, + "name": name, + "category": category.value, + } + ) if the_list: df = pd.DataFrame.from_records(the_list) - df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, - force_update=True) - self.logger.info(f"finish record sina blocks:{category.value}") + df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, force_update=self.force_update) + self.logger.info(f"finish record eastmoney blocks:{category.value}") -class EastmoneyChinaBlockStockRecorder(TimeSeriesDataRecorder): - entity_provider = 'eastmoney' +class EastmoneyBlockStockRecorder(TimeSeriesDataRecorder): + entity_provider = "eastmoney" entity_schema = Block - provider = 'eastmoney' + provider = "eastmoney" data_schema = BlockStock # 用于抓取行业包含的股票 - category_stocks_url = 'https://nufm.dfcfw.com/EM_Finance2014NumericApplication/JS.aspx?type=CT&cmd=C.{}{}&sty=SFCOO&st=(Close)&sr=-1&p=1&ps=300&cb=jsonp_B66B5BAA1C1B47B5BB9778045845B947&token=7bc05d0d4c3c22ef9fca8c2a912d779c' - - def __init__(self, exchanges=None, entity_ids=None, codes=None, day_data=False, batch_size=10, - force_update=False, sleeping_time=5, default_size=2000, real_time=False, fix_duplicate_way='add', - start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0) -> None: - super().__init__('block', exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute) + category_stocks_url = 
"https://nufm.dfcfw.com/EM_Finance2014NumericApplication/JS.aspx?type=CT&cmd=C.{}{}&sty=SFCOO&st=(Close)&sr=-1&p=1&ps=300&cb=jsonp_B66B5BAA1C1B47B5BB9778045845B947&token=7bc05d0d4c3c22ef9fca8c2a912d779c" def record(self, entity, start, end, size, timestamps): - resp = requests.get(self.category_stocks_url.format(entity.code, '1')) + resp = requests.get(self.category_stocks_url.format(entity.code, "1"), headers=DEFAULT_HEADER) try: results = json_callback_param(resp.text) the_list = [] for result in results: - items = result.split(',') + items = result.split(",") stock_code = items[1] stock_id = china_stock_code_to_id(stock_code) block_id = entity.id - the_list.append({ - 'id': '{}_{}'.format(block_id, stock_id), - 'entity_id': block_id, - 'entity_type': 'block', - 'exchange': entity.exchange, - 'code': entity.code, - 'name': entity.name, - 'timestamp': now_pd_timestamp(), - 'stock_id': stock_id, - 'stock_code': stock_code, - 'stock_name': items[2], - - }) + the_list.append( + { + "id": "{}_{}".format(block_id, stock_id), + "entity_id": block_id, + "entity_type": "block", + "exchange": entity.exchange, + "code": entity.code, + "name": entity.name, + "timestamp": now_pd_timestamp(), + "stock_id": stock_id, + "stock_code": stock_code, + "stock_name": items[2], + } + ) if the_list: df = pd.DataFrame.from_records(the_list) df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, force_update=True) - self.logger.info('finish recording block:{},{}'.format(entity.category, entity.name)) + self.logger.info("finish recording block:{},{}".format(entity.category, entity.name)) except Exception as e: self.logger.error("error:,resp.text:", e, resp.text) self.sleep() -__all__ = ['EastmoneyChinaBlockRecorder', 'EastmoneyChinaBlockStockRecorder'] - -if __name__ == '__main__': +if __name__ == "__main__": # init_log('china_stock_category.log') + EastmoneyBlockRecorder().run() - recorder = EastmoneyChinaBlockStockRecorder(codes=['BK0727']) + recorder = 
EastmoneyBlockStockRecorder(code="BK1144") recorder.run() + + +# the __all__ is generated +__all__ = ["EastmoneyBlockRecorder", "EastmoneyBlockStockRecorder"] diff --git a/src/zvt/recorders/eastmoney/meta/eastmoney_stock_meta_recorder.py b/src/zvt/recorders/eastmoney/meta/eastmoney_stock_meta_recorder.py new file mode 100644 index 00000000..13a346c4 --- /dev/null +++ b/src/zvt/recorders/eastmoney/meta/eastmoney_stock_meta_recorder.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- + +import requests + +from zvt.contract.api import get_entities +from zvt.contract.recorder import Recorder +from zvt.domain.meta.stock_meta import StockDetail, Stock +from zvt.recorders.exchange.exchange_stock_meta_recorder import ExchangeStockMetaRecorder +from zvt.utils.time_utils import to_pd_timestamp +from zvt.utils.utils import to_float, pct_to_float + + +class EastmoneyStockRecorder(ExchangeStockMetaRecorder): + data_schema = Stock + provider = "eastmoney" + + +class EastmoneyStockDetailRecorder(Recorder): + provider = "eastmoney" + data_schema = StockDetail + + def __init__(self, force_update=False, sleeping_time=5, code=None, codes=None) -> None: + super().__init__(force_update, sleeping_time) + + # get list at first + EastmoneyStockRecorder().run() + + if codes is None and code is not None: + self.codes = [code] + else: + self.codes = codes + filters = None + if not self.force_update: + filters = [StockDetail.profile.is_(None)] + self.entities = get_entities( + session=self.session, + entity_schema=StockDetail, + exchanges=None, + codes=self.codes, + filters=filters, + return_type="domain", + provider=self.provider, + ) + + def run(self): + for security_item in self.entities: + assert isinstance(security_item, StockDetail) + + if security_item.exchange == "sh": + fc = "{}01".format(security_item.code) + if security_item.exchange == "sz": + fc = "{}02".format(security_item.code) + + # 基本资料 + # param = {"color": "w", "fc": fc, "SecurityCode": "SZ300059"} + + securities_code = 
f"{security_item.code}.{security_item.exchange.upper()}" + param = { + "type": "RPT_F10_ORG_BASICINFO", + "sty": "ORG_PROFIE,MAIN_BUSINESS,FOUND_DATE,EM2016,BLGAINIAN,REGIONBK", + "filter": f"(SECUCODE=\"{securities_code}\")", + "client": "app", + "source": "SECURITIES", + "pageNumber": 1, + "pageSize": 1 + } + resp = requests.get("https://datacenter.eastmoney.com/securities/api/data/get", params=param) + resp.encoding = "utf8" + + resp_json = resp.json()["result"]["data"][0] + + security_item.profile = resp_json["ORG_PROFIE"] + security_item.main_business = resp_json["MAIN_BUSINESS"] + security_item.date_of_establishment = to_pd_timestamp(resp_json["FOUND_DATE"]) + + # 关联行业 + industries = ",".join(resp_json["EM2016"].split("-")) + security_item.industries = industries + + # 关联概念 + security_item.concept_indices = resp_json["BLGAINIAN"] + + # 关联地区 + security_item.area_indices = resp_json["REGIONBK"] + + self.sleep() + + # 发行相关 + param = { + "reportName": "RPT_F10_ORG_ISSUEINFO", + "columns": "AFTER_ISSUE_PE,ISSUE_PRICE,TOTAL_ISSUE_NUM,NET_RAISE_FUNDS,ONLINE_ISSUE_LWR", + "filter": f"(SECUCODE=\"{securities_code}\")(TYPENEW=\"4\")", + "client": "app", + "source": "SECURITIES", + "pageNumber": 1, + "pageSize": 1 + } + resp = requests.get("https://datacenter.eastmoney.com/securities/api/data/v1/get", params=param) + resp.encoding = "utf8" + + resp_json = resp.json()["result"]["data"][0] + + security_item.issue_pe = resp_json["AFTER_ISSUE_PE"] + security_item.price = resp_json["ISSUE_PRICE"] + security_item.issues = resp_json["TOTAL_ISSUE_NUM"] + security_item.raising_fund = resp_json.get("NET_RAISE_FUNDS") + security_item.net_winning_rate = resp_json["ONLINE_ISSUE_LWR"] + + self.session.commit() + + self.logger.info("finish recording stock meta for:{}".format(security_item.code)) + + self.sleep() + + +if __name__ == "__main__": + # init_log('china_stock_meta.log') + + recorder = EastmoneyStockRecorder() + recorder.run() + StockDetail.record_data(codes=["000338", 
"000777"], provider="eastmoney") + + +# the __all__ is generated +__all__ = ["EastmoneyStockRecorder", "EastmoneyStockDetailRecorder"] diff --git a/src/zvt/recorders/eastmoney/trading/__init__.py b/src/zvt/recorders/eastmoney/trading/__init__.py new file mode 100644 index 00000000..664af647 --- /dev/null +++ b/src/zvt/recorders/eastmoney/trading/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*-# + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule eastmoney_holder_trading_recorder +from .eastmoney_holder_trading_recorder import * +from .eastmoney_holder_trading_recorder import __all__ as _eastmoney_holder_trading_recorder_all + +__all__ += _eastmoney_holder_trading_recorder_all + +# import all from submodule eastmoney_manager_trading_recorder +from .eastmoney_manager_trading_recorder import * +from .eastmoney_manager_trading_recorder import __all__ as _eastmoney_manager_trading_recorder_all + +__all__ += _eastmoney_manager_trading_recorder_all diff --git a/zvt/recorders/eastmoney/trading/holder_trading_recorder.py b/src/zvt/recorders/eastmoney/trading/eastmoney_holder_trading_recorder.py similarity index 71% rename from zvt/recorders/eastmoney/trading/holder_trading_recorder.py rename to src/zvt/recorders/eastmoney/trading/eastmoney_holder_trading_recorder.py index 50da21e9..e3dee0f6 100644 --- a/zvt/recorders/eastmoney/trading/holder_trading_recorder.py +++ b/src/zvt/recorders/eastmoney/trading/eastmoney_holder_trading_recorder.py @@ -1,25 +1,24 @@ # -*- coding: utf-8 -*- -from zvt.utils.utils import to_float from zvt.domain import HolderTrading - from zvt.recorders.eastmoney.common import EastmoneyMoreDataRecorder +from zvt.utils.utils import to_float class HolderTradingRecorder(EastmoneyMoreDataRecorder): data_schema = HolderTrading - url = 'https://emh5.eastmoney.com/api/JiaoYiShuJu/GetGuDongZengJian' - 
path_fields = ['GuDongZengJianList'] + url = "https://emh5.eastmoney.com/api/JiaoYiShuJu/GetGuDongZengJian" + path_fields = ["GuDongZengJianList"] def get_original_time_field(self): - return 'RiQi' + return "RiQi" def get_data_map(self): return { "holder_name": ("GuDongMingCheng", str), "volume": ("BianDongShuLiang", to_float), "change_pct": ("BianDongBiLi", to_float), - "holding_pct": ("BianDongHouChiGuBiLi", to_float) + "holding_pct": ("BianDongHouChiGuBiLi", to_float), } def generate_domain_id(self, entity, original_data): @@ -29,10 +28,12 @@ def generate_domain_id(self, entity, original_data): return the_id -__all__ = ['HolderTradingRecorder'] - -if __name__ == '__main__': +if __name__ == "__main__": # init_log('holder_trading.log') - recorder = HolderTradingRecorder(codes=['002572']) + recorder = HolderTradingRecorder(codes=["002572"]) recorder.run() + + +# the __all__ is generated +__all__ = ["HolderTradingRecorder"] diff --git a/zvt/recorders/eastmoney/trading/manager_trading_recorder.py b/src/zvt/recorders/eastmoney/trading/eastmoney_manager_trading_recorder.py similarity index 79% rename from zvt/recorders/eastmoney/trading/manager_trading_recorder.py rename to src/zvt/recorders/eastmoney/trading/eastmoney_manager_trading_recorder.py index 1eeeb7f2..29831a95 100644 --- a/zvt/recorders/eastmoney/trading/manager_trading_recorder.py +++ b/src/zvt/recorders/eastmoney/trading/eastmoney_manager_trading_recorder.py @@ -1,18 +1,17 @@ # -*- coding: utf-8 -*- -from zvt.utils.utils import to_float from zvt.domain import ManagerTrading - from zvt.recorders.eastmoney.common import EastmoneyMoreDataRecorder +from zvt.utils.utils import to_float class ManagerTradingRecorder(EastmoneyMoreDataRecorder): data_schema = ManagerTrading - url = 'https://emh5.eastmoney.com/api/JiaoYiShuJu/GetGaoGuanZengJian' - path_fields = ['GaoGuanZengJianList'] + url = "https://emh5.eastmoney.com/api/JiaoYiShuJu/GetGaoGuanZengJian" + path_fields = ["GaoGuanZengJianList"] def 
get_original_time_field(self): - return 'RiQi' + return "RiQi" def get_data_map(self): return { @@ -33,10 +32,12 @@ def generate_domain_id(self, entity, original_data): return the_id -__all__ = ['ManagerTradingRecorder'] - -if __name__ == '__main__': +if __name__ == "__main__": # init_log('manager_trading.log') - recorder = ManagerTradingRecorder(codes=['002572']) + recorder = ManagerTradingRecorder(codes=["002572"]) recorder.run() + + +# the __all__ is generated +__all__ = ["ManagerTradingRecorder"] diff --git a/src/zvt/recorders/em/__init__.py b/src/zvt/recorders/em/__init__.py new file mode 100644 index 00000000..d431f304 --- /dev/null +++ b/src/zvt/recorders/em/__init__.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule trading +from .trading import * +from .trading import __all__ as _trading_all + +__all__ += _trading_all + +# import all from submodule actor +from .actor import * +from .actor import __all__ as _actor_all + +__all__ += _actor_all + +# import all from submodule misc +from .misc import * +from .misc import __all__ as _misc_all + +__all__ += _misc_all + +# import all from submodule quotes +from .quotes import * +from .quotes import __all__ as _quotes_all + +__all__ += _quotes_all + +# import all from submodule em_api +from .em_api import * +from .em_api import __all__ as _em_api_all + +__all__ += _em_api_all + +# import all from submodule macro +from .macro import * +from .macro import __all__ as _macro_all + +__all__ += _macro_all + +# import all from submodule meta +from .meta import * +from .meta import __all__ as _meta_all + +__all__ += _meta_all diff --git a/src/zvt/recorders/em/actor/__init__.py b/src/zvt/recorders/em/actor/__init__.py new file mode 100644 index 00000000..efeda8f9 --- /dev/null +++ b/src/zvt/recorders/em/actor/__init__.py @@ 
-0,0 +1,33 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule em_stock_top_ten_recorder +from .em_stock_top_ten_recorder import * +from .em_stock_top_ten_recorder import __all__ as _em_stock_top_ten_recorder_all + +__all__ += _em_stock_top_ten_recorder_all + +# import all from submodule em_stock_top_ten_free_recorder +from .em_stock_top_ten_free_recorder import * +from .em_stock_top_ten_free_recorder import __all__ as _em_stock_top_ten_free_recorder_all + +__all__ += _em_stock_top_ten_free_recorder_all + +# import all from submodule em_stock_ii_recorder +from .em_stock_ii_recorder import * +from .em_stock_ii_recorder import __all__ as _em_stock_ii_recorder_all + +__all__ += _em_stock_ii_recorder_all + +# import all from submodule em_stock_actor_summary_recorder +from .em_stock_actor_summary_recorder import * +from .em_stock_actor_summary_recorder import __all__ as _em_stock_actor_summary_recorder_all + +__all__ += _em_stock_actor_summary_recorder_all diff --git a/src/zvt/recorders/em/actor/em_stock_actor_summary_recorder.py b/src/zvt/recorders/em/actor/em_stock_actor_summary_recorder.py new file mode 100644 index 00000000..a82576b2 --- /dev/null +++ b/src/zvt/recorders/em/actor/em_stock_actor_summary_recorder.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +from typing import List + +import pandas as pd + +from zvt.api.utils import to_report_period_type, value_to_pct +from zvt.contract import ActorType +from zvt.contract.api import df_to_db +from zvt.contract.recorder import TimestampsDataRecorder +from zvt.domain import Stock +from zvt.domain.actor.stock_actor import StockActorSummary +from zvt.recorders.em.em_api import get_ii_holder_report_dates, actor_type_to_org_type, get_ii_summary +from zvt.utils.time_utils import to_pd_timestamp, to_time_str + + +# [{'CHANGE_RATIO': 
-1.045966694333, +# 'IS_COMPLETE': '1', +# 'ORG_TYPE': '07', +# 'REPORT_DATE': '2021-03-31 00:00:00', +# 'SECUCODE': '000338.SZ', +# 'SECURITY_CODE': '000338', +# 'TOTAL_FREE_SHARES': 2598718411, +# 'TOTAL_MARKET_CAP': 49999342227.64, +# 'TOTAL_ORG_NUM': 5, +# 'TOTAL_SHARES_RATIO': 29.51742666}] + + +class EMStockActorSummaryRecorder(TimestampsDataRecorder): + entity_provider = "em" + entity_schema = Stock + + provider = "em" + data_schema = StockActorSummary + + def init_timestamps(self, entity_item) -> List[pd.Timestamp]: + result = get_ii_holder_report_dates(code=entity_item.code) + if result: + return [to_pd_timestamp(item["REPORT_DATE"]) for item in result] + + def record(self, entity, start, end, size, timestamps): + for timestamp in timestamps: + the_date = to_time_str(timestamp) + self.logger.info(f"to {entity.code} {the_date}") + for actor_type in ActorType: + if actor_type == ActorType.private_equity or actor_type == ActorType.individual: + continue + result = get_ii_summary( + code=entity.code, report_date=the_date, org_type=actor_type_to_org_type(actor_type) + ) + if result: + summary_list = [ + { + "id": f"{entity.entity_id}_{the_date}_{actor_type.value}", + "entity_id": entity.entity_id, + "timestamp": timestamp, + "code": entity.code, + "name": entity.name, + "actor_type": actor_type.value, + "actor_count": item["TOTAL_ORG_NUM"], + "report_date": timestamp, + "report_period": to_report_period_type(timestamp), + "change_ratio": value_to_pct(item["CHANGE_RATIO"], default=1), + "is_complete": item["IS_COMPLETE"], + "holding_numbers": item["TOTAL_FREE_SHARES"], + "holding_ratio": value_to_pct(item["TOTAL_SHARES_RATIO"], default=0), + "holding_values": item["TOTAL_MARKET_CAP"], + } + for item in result + ] + df = pd.DataFrame.from_records(summary_list) + df_to_db( + data_schema=self.data_schema, + df=df, + provider=self.provider, + force_update=True, + drop_duplicates=True, + ) + + +if __name__ == "__main__": + 
EMStockActorSummaryRecorder(codes=["000338"]).run() + + +# the __all__ is generated +__all__ = ["EMStockActorSummaryRecorder"] diff --git a/src/zvt/recorders/em/actor/em_stock_ii_recorder.py b/src/zvt/recorders/em/actor/em_stock_ii_recorder.py new file mode 100644 index 00000000..bd5d7ed4 --- /dev/null +++ b/src/zvt/recorders/em/actor/em_stock_ii_recorder.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +from typing import List + +import pandas as pd + +from zvt.api.utils import to_report_period_type, value_to_pct +from zvt.contract import ActorType +from zvt.contract.api import df_to_db +from zvt.contract.recorder import TimestampsDataRecorder +from zvt.domain import Stock, ActorMeta +from zvt.domain.actor.stock_actor import StockInstitutionalInvestorHolder +from zvt.recorders.em.em_api import get_ii_holder_report_dates, get_ii_holder, actor_type_to_org_type +from zvt.utils.time_utils import to_pd_timestamp, to_time_str + + +# {'END_DATE': '2021-03-31 00:00:00', +# 'HOLDER_CODE': '10015776', +# 'HOLDER_CODE_OLD': '80010104', +# 'HOLDER_NAME': '香港中央结算代理人有限公司', +# 'HOLDER_RANK': 1, +# 'HOLD_NUM': 1938664086, +# 'HOLD_NUM_RATIO': 24.44, +# 'HOLD_RATIO_QOQ': '0.04093328', +# 'IS_HOLDORG': '1', +# 'SECUCODE': '000338.SZ'} + +# {'END_DATE': '2021-03-31 00:00:00', +# 'FREE_HOLDNUM_RATIO': 0.631949916991, +# 'FREE_RATIO_QOQ': '-5.33046217', +# 'HOLDER_CODE': '161606', +# 'HOLDER_CODE_OLD': '161606', +# 'HOLDER_NAME': '交通银行-融通行业景气证券投资基金', +# 'HOLDER_RANK': 10, +# 'HOLD_NUM': 39100990, +# 'IS_HOLDORG': '1', +# 'SECUCODE': '000338.SZ'} + + +class EMStockIIRecorder(TimestampsDataRecorder): + entity_provider = "em" + entity_schema = Stock + + provider = "em" + data_schema = StockInstitutionalInvestorHolder + + def init_timestamps(self, entity_item) -> List[pd.Timestamp]: + result = get_ii_holder_report_dates(code=entity_item.code) + if result: + return [to_pd_timestamp(item["REPORT_DATE"]) for item in result] + + def record(self, entity, start, end, size, timestamps): + for 
timestamp in timestamps: + the_date = to_time_str(timestamp) + self.logger.info(f"to {entity.code} {the_date}") + for actor_type in ActorType: + if actor_type == ActorType.private_equity or actor_type == ActorType.individual: + continue + result = get_ii_holder( + code=entity.code, report_date=the_date, org_type=actor_type_to_org_type(actor_type) + ) + if result: + holders = [ + { + "id": f'{entity.entity_id}_{the_date}_{actor_type.value}_cn_{item["HOLDER_CODE"]}', + "entity_id": entity.entity_id, + "timestamp": timestamp, + "code": entity.code, + "name": entity.name, + "actor_id": f'{actor_type.value}_cn_{item["HOLDER_CODE"]}', + "actor_type": actor_type.value, + "actor_code": item["HOLDER_CODE"], + "actor_name": f'{item["HOLDER_NAME"]}', + "report_date": timestamp, + "report_period": to_report_period_type(timestamp), + "holding_numbers": item["TOTAL_SHARES"], + "holding_ratio": value_to_pct(item["FREESHARES_RATIO"], 0), + "holding_values": item["HOLD_VALUE"], + } + for item in result + ] + df = pd.DataFrame.from_records(holders) + df_to_db( + data_schema=self.data_schema, + df=df, + provider=self.provider, + force_update=True, + drop_duplicates=True, + ) + + # save the actors + actors = [ + { + "id": f'{actor_type.value}_cn_{item["HOLDER_CODE"]}', + "entity_id": f'{actor_type.value}_cn_{item["HOLDER_CODE"]}', + "entity_type": actor_type.value, + "exchange": "cn", + "code": item["HOLDER_CODE"], + "name": f'{item["HOLDER_NAME"]}', + } + for item in result + ] + df1 = pd.DataFrame.from_records(actors) + df_to_db( + data_schema=ActorMeta, df=df1, provider=self.provider, force_update=False, drop_duplicates=True + ) + + +if __name__ == "__main__": + EMStockIIRecorder(codes=["000562"]).run() + + +# the __all__ is generated +__all__ = ["EMStockIIRecorder"] diff --git a/src/zvt/recorders/em/actor/em_stock_top_ten_free_recorder.py b/src/zvt/recorders/em/actor/em_stock_top_ten_free_recorder.py new file mode 100644 index 00000000..58034ab7 --- /dev/null +++ 
b/src/zvt/recorders/em/actor/em_stock_top_ten_free_recorder.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- +from typing import List + +import pandas as pd + +from zvt.api.utils import to_report_period_type, value_to_pct +from zvt.contract import ActorType +from zvt.contract.api import df_to_db +from zvt.contract.recorder import TimestampsDataRecorder +from zvt.domain import Stock, ActorMeta +from zvt.domain.actor.stock_actor import StockTopTenFreeHolder, StockInstitutionalInvestorHolder +from zvt.recorders.em.em_api import get_holder_report_dates, get_free_holders +from zvt.utils.time_utils import to_pd_timestamp, to_time_str + + +class EMStockTopTenFreeRecorder(TimestampsDataRecorder): + entity_provider = "em" + entity_schema = Stock + + provider = "em" + data_schema = StockTopTenFreeHolder + + def init_timestamps(self, entity_item) -> List[pd.Timestamp]: + result = get_holder_report_dates(code=entity_item.code) + if result: + return [to_pd_timestamp(item["END_DATE"]) for item in result] + + def on_finish_entity(self, entity): + super().on_finish_entity(entity) + holders = StockTopTenFreeHolder.query_data( + entity_id=entity.id, + filters=[StockTopTenFreeHolder.holding_values == None], + session=self.session, + return_type="domain", + ) + for holder in holders: + ii = StockInstitutionalInvestorHolder.query_data( + entity_id=entity.id, + filters=[ + StockInstitutionalInvestorHolder.holding_values > 1, + StockInstitutionalInvestorHolder.holding_ratio > 0.01, + StockInstitutionalInvestorHolder.timestamp == holder.timestamp, + ], + limit=1, + return_type="domain", + ) + if ii: + holder.holding_values = holder.holding_ratio * ii[0].holding_values / ii[0].holding_ratio + self.session.commit() + + def record(self, entity, start, end, size, timestamps): + for timestamp in timestamps: + the_date = to_time_str(timestamp) + result = get_free_holders(code=entity.code, end_date=the_date) + if result: + holders = [] + new_actors = [] + for item in result: + # {'END_DATE': 
'2021-03-31 00:00:00', + # 'FREE_HOLDNUM_RATIO': 0.631949916991, + # 'FREE_RATIO_QOQ': '-5.33046217', + # 'HOLDER_CODE': '161606', + # 'HOLDER_CODE_OLD': '161606', + # 'HOLDER_NAME': '交通银行-融通行业景气证券投资基金', + # 'HOLDER_RANK': 10, + # 'HOLD_NUM': 39100990, + # 'IS_HOLDORG': '1', + # 'SECUCODE': '000338.SZ'} + # 机构 + if item["IS_HOLDORG"] == "1": + domains: List[ActorMeta] = ActorMeta.query_data( + filters=[ActorMeta.code == item["HOLDER_CODE"]], return_type="domain" + ) + if not domains: + actor_type = ActorType.corporation.value + actor = ActorMeta( + entity_id=f'{actor_type}_cn_{item["HOLDER_CODE"]}', + id=f'{actor_type}_cn_{item["HOLDER_CODE"]}', + entity_type=actor_type, + exchange="cn", + code=item["HOLDER_CODE"], + name=item["HOLDER_NAME"], + ) + else: + actor = domains[0] + else: + actor_type = ActorType.individual.value + actor = ActorMeta( + entity_id=f'{actor_type}_cn_{item["HOLDER_NAME"]}', + id=f'{actor_type}_cn_{item["HOLDER_NAME"]}', + entity_type=actor_type, + exchange="cn", + code=item["HOLDER_NAME"], + name=item["HOLDER_NAME"], + ) + new_actors.append(actor.__dict__) + holder = { + "id": f"{entity.entity_id}_{the_date}_{actor.entity_id}", + "entity_id": entity.entity_id, + "timestamp": timestamp, + "code": entity.code, + "name": entity.name, + "actor_id": actor.entity_id, + "actor_type": actor.entity_type, + "actor_code": actor.code, + "actor_name": actor.name, + "report_date": timestamp, + "report_period": to_report_period_type(timestamp), + "holding_numbers": item["HOLD_NUM"], + "holding_ratio": value_to_pct(item["FREE_HOLDNUM_RATIO"], 0), + } + holders.append(holder) + if holders: + df = pd.DataFrame.from_records(holders) + df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, force_update=True) + if new_actors: + df = pd.DataFrame.from_records(new_actors) + df_to_db(data_schema=ActorMeta, df=df, provider=self.provider, force_update=False) + + +if __name__ == "__main__": + EMStockTopTenFreeRecorder(codes=["000338"]).run() + + +# the 
__all__ is generated +__all__ = ["EMStockTopTenFreeRecorder"] diff --git a/src/zvt/recorders/em/actor/em_stock_top_ten_recorder.py b/src/zvt/recorders/em/actor/em_stock_top_ten_recorder.py new file mode 100644 index 00000000..2b13a29c --- /dev/null +++ b/src/zvt/recorders/em/actor/em_stock_top_ten_recorder.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +from typing import List + +import pandas as pd + +from zvt.api.utils import to_report_period_type, value_to_pct +from zvt.contract import ActorType +from zvt.contract.api import df_to_db +from zvt.contract.recorder import TimestampsDataRecorder +from zvt.domain import Stock, ActorMeta +from zvt.domain.actor.stock_actor import StockTopTenHolder, StockInstitutionalInvestorHolder +from zvt.recorders.em.em_api import get_holder_report_dates, get_holders +from zvt.utils.time_utils import to_pd_timestamp, to_time_str + + +class EMStockTopTenRecorder(TimestampsDataRecorder): + entity_provider = "em" + entity_schema = Stock + + provider = "em" + data_schema = StockTopTenHolder + + def init_timestamps(self, entity_item) -> List[pd.Timestamp]: + result = get_holder_report_dates(code=entity_item.code) + if result: + return [to_pd_timestamp(item["END_DATE"]) for item in result] + + def on_finish_entity(self, entity): + super().on_finish_entity(entity) + holders = StockTopTenHolder.query_data( + entity_id=entity.id, + filters=[StockTopTenHolder.holding_values == None], + session=self.session, + return_type="domain", + ) + for holder in holders: + ii = StockInstitutionalInvestorHolder.query_data( + entity_id=entity.id, + filters=[ + StockInstitutionalInvestorHolder.holding_values > 1, + StockInstitutionalInvestorHolder.holding_ratio > 0.01, + StockInstitutionalInvestorHolder.timestamp == holder.timestamp, + ], + limit=1, + return_type="domain", + ) + if ii: + holder.holding_values = holder.holding_ratio * ii[0].holding_values / ii[0].holding_ratio + self.session.commit() + + def record(self, entity, start, end, size, timestamps): 
+ for timestamp in timestamps: + the_date = to_time_str(timestamp) + result = get_holders(code=entity.code, end_date=the_date) + if result: + holders = [] + new_actors = [] + for item in result: + # 机构 + if item["IS_HOLDORG"] == "1": + domains: List[ActorMeta] = ActorMeta.query_data( + filters=[ActorMeta.code == item["HOLDER_CODE"]], return_type="domain" + ) + if not domains: + actor_type = ActorType.corporation.value + actor = ActorMeta( + entity_id=f'{actor_type}_cn_{item["HOLDER_CODE"]}', + id=f'{actor_type}_cn_{item["HOLDER_CODE"]}', + entity_type=actor_type, + exchange="cn", + code=item["HOLDER_CODE"], + name=item["HOLDER_NAME"], + ) + else: + actor = domains[0] + else: + actor_type = ActorType.individual.value + actor = ActorMeta( + entity_id=f'{actor_type}_cn_{item["HOLDER_NAME"]}', + id=f'{actor_type}_cn_{item["HOLDER_NAME"]}', + entity_type=actor_type, + exchange="cn", + code=item["HOLDER_NAME"], + name=item["HOLDER_NAME"], + ) + new_actors.append(actor.__dict__) + holder = { + "id": f"{entity.entity_id}_{the_date}_{actor.entity_id}", + "entity_id": entity.entity_id, + "timestamp": timestamp, + "code": entity.code, + "name": entity.name, + "actor_id": actor.entity_id, + "actor_type": actor.entity_type, + "actor_code": actor.code, + "actor_name": actor.name, + "report_date": timestamp, + "report_period": to_report_period_type(timestamp), + "holding_numbers": item["HOLD_NUM"], + "holding_ratio": value_to_pct(item["HOLD_NUM_RATIO"], default=0), + } + holders.append(holder) + if holders: + df = pd.DataFrame.from_records(holders) + df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, force_update=True) + if new_actors: + df = pd.DataFrame.from_records(new_actors) + df_to_db(data_schema=ActorMeta, df=df, provider=self.provider, force_update=False) + + +if __name__ == "__main__": + EMStockTopTenRecorder(codes=["000002"]).run() + + +# the __all__ is generated +__all__ = ["EMStockTopTenRecorder"] diff --git a/src/zvt/recorders/em/em_api.py 
# -*- coding: utf-8 -*-
"""Eastmoney (em) HTTP data API: treasury yields, holder data, dragon-and-tiger lists."""
import logging
import random
from typing import Union

import demjson3
import pandas as pd
import requests
import sqlalchemy
from requests import Session

from zvt.api.kdata import generate_kdata_id
from zvt.api.utils import value_to_pct, china_stock_code_to_id
from zvt.contract import (
    ActorType,
    AdjustType,
    IntervalLevel,
    Exchange,
    TradableType,
    get_entity_exchanges,
    tradable_type_map_exchanges,
)
from zvt.contract.api import decode_entity_id, df_to_db
from zvt.domain import BlockCategory, StockHotTopic
from zvt.recorders.consts import DEFAULT_HEADER
from zvt.utils.time_utils import (
    to_pd_timestamp,
    now_timestamp,
    to_time_str,
    current_date,
    now_pd_timestamp,
)
from zvt.utils.utils import to_float, json_callback_param

logger = logging.getLogger(__name__)


# Treasury yields of China and the US
def get_treasury_yield(pn=1, ps=2000, fetch_all=True):
    """Fetch treasury-yield records; emits one CN and one US dict per trading date.

    :param pn: page number to start from
    :param ps: page size
    :param fetch_all: when True, follow pagination until exhausted
    :return: list of dicts with 2/5/10/30-year yields keyed per country
    """
    raw_items = get_em_data(
        request_type="RPTA_WEB_TREASURYYIELD",
        source=None,
        fields="ALL",
        sort_by="SOLAR_DATE",
        sort="desc",
        pn=pn,
        ps=ps,
        fetch_all=fetch_all,
    )
    records = []
    for raw in raw_items:
        date = raw["SOLAR_DATE"]
        # China (EMM* field codes)
        records.append(
            {
                "id": f"country_galaxy_CN_{to_time_str(date)}",
                "entity_id": "country_galaxy_CN",
                "timestamp": to_pd_timestamp(date),
                "code": "CN",
                "yield_2": raw.get("EMM00588704"),
                "yield_5": raw.get("EMM00166462"),
                "yield_10": raw.get("EMM00166466"),
                "yield_30": raw.get("EMM00166469"),
            }
        )
        # United States (EMG* field codes)
        records.append(
            {
                "id": f"country_galaxy_US_{to_time_str(date)}",
                "entity_id": "country_galaxy_US",
                "timestamp": to_pd_timestamp(date),
                "code": "US",
                "yield_2": raw.get("EMG00001306"),
                "yield_5": raw.get("EMG00001308"),
                "yield_10": raw.get("EMG00001310"),
                "yield_30": raw.get("EMG00001312"),
            }
        )
    return records


# report dates of institutional holdings
def get_ii_holder_report_dates(code):
    """Report dates on which institutional-holding data exists for the stock."""
    return get_em_data(
        request_type="RPT_F10_MAIN_ORGHOLD",
        fields="REPORT_DATE,IS_COMPLETE",
        filters=generate_filters(code=code),
        sort_by="REPORT_DATE",
        sort="desc",
    )


def get_dragon_and_tiger_list(start_date, end_date=None):
    """Market-wide dragon-and-tiger (daily billboard) records in [start_date, end_date]."""
    start_date = to_time_str(start_date)
    if not end_date:
        end_date = now_timestamp()
    end_date = to_time_str(end_date)
    return get_em_data(
        request_type="RPT_DAILYBILLBOARD_DETAILS",
        fields="ALL",
        source="DataCenter",
        filters=f"(TRADE_DATE>='{start_date}')(TRADE_DATE<='{end_date}')",
        sort_by="TRADE_DATE,SECURITY_CODE",
        sort="asc,asc",
    )


# dragon-and-tiger list for a single stock
def get_dragon_and_tiger(code, start_date=None):
    """Per-stock billboard trades with grouped per-desk buy/sell detail."""
    return get_em_data(
        request_type="RPT_OPERATEDEPT_TRADE",
        fields="TRADE_ID,TRADE_DATE,EXPLANATION,SECUCODE,SECURITY_CODE,SECURITY_NAME_ABBR,ACCUM_AMOUNT,CHANGE_RATE,NET_BUY,BUY_BUY_TOTAL,BUY_SELL_TOTAL,BUY_RATIO_TOTAL,SELL_BUY_TOTAL,SELL_SELL_TOTAL,SELL_RATIO_TOTAL,TRADE_DIRECTION,RANK,OPERATEDEPT_NAME,BUY_AMT_REAL,SELL_AMT_REAL,BUY_RATIO,SELL_RATIO,BUY_TOTAL,SELL_TOTAL,BUY_TOTAL_NET,SELL_TOTAL_NET,NET",
        filters=generate_filters(code=code, trade_date=start_date, field_op={"trade_date": ">="}),
        params='(groupField=TRADE_ID)(groupedFields=TRADE_DIRECTION,RANK,OPERATEDEPT_NAME,BUY_AMT_REAL,SELL_AMT_REAL,BUY_RATIO,SELL_RATIO,NET")(groupListName="LIST")',
        sort_by="TRADE_DATE,RANK",
        sort="asc,asc",
    )


# report dates of top-ten holders
def get_holder_report_dates(code):
    """End dates on which top-ten-holder data exists for the stock."""
    return get_em_data(
        request_type="RPT_F10_EH_HOLDERSDATE",
        fields="END_DATE,IS_DEFAULT,IS_REPORTDATE",
        filters=generate_filters(code=code),
        sort_by="END_DATE",
        sort="desc",
    )


# report dates of top-ten free-float holders
def get_free_holder_report_dates(code):
    """End dates on which top-ten free-float-holder data exists for the stock."""
    return get_em_data(
        request_type="RPT_F10_EH_FREEHOLDERSDATE",
        fields="END_DATE,IS_DEFAULT,IS_REPORTDATE",
        filters=generate_filters(code=code),
        sort_by="END_DATE",
        sort="desc",
    )
https://datacenter.eastmoney.com/securities/api/data/get?type=RPT_F10_EH_RELATION&sty=SECUCODE%2CHOLDER_NAME%2CRELATED_RELATION%2CHOLD_RATIO&filter=(SECUCODE%3D%22601162.SH%22)&client=APP&source=SECURITIES&p=1&ps=200&rdm=rnd_01BE6995104944ED99B70EEB7FFC0353&v=012649539724458458 +# https://datacenter.eastmoney.com/securities/api/data/get?type=RPT_F10_FREE_TOTALHOLDNUM&sty=SECUCODE%2CSECURITY_CODE%2CEND_DATE%2CHOLD_NUM_COUNT%2CHOLD_RATIO_COUNT%2CHOLD_RATIO_CHANGE&filter=(SECUCODE%3D%22601162.SH%22)(END_DATE%3D%272024-09-30%27)&client=APP&source=SECURITIES&p=1&ps=200&sr=1&st=&rdm=rnd_FA1943FA30474E3AA0CCF206EA1B5749&v=032098454407366983 +def get_controlling_shareholder(code): + return get_em_data( + request_type="RPT_F10_EH_RELATION", + fields="SECUCODE,CHOLDER_NAME,CRELATED_RELATION,CHOLD_RATIO", + filters=generate_filters(code=code), + ) + + +# 机构持仓 +def get_ii_holder(code, report_date, org_type): + return get_em_data( + request_type="RPT_MAIN_ORGHOLDDETAIL", + fields="SECURITY_CODE,REPORT_DATE,HOLDER_CODE,HOLDER_NAME,TOTAL_SHARES,HOLD_VALUE,FREESHARES_RATIO,ORG_TYPE,SECUCODE,FUND_DERIVECODE", + filters=generate_filters(code=code, report_date=report_date, org_type=org_type), + ) + + +# 机构持仓汇总 +def get_ii_summary(code, report_date, org_type): + return get_em_data( + request_type="RPT_F10_MAIN_ORGHOLDDETAILS", + fields="SECURITY_CODE,SECUCODE,REPORT_DATE,ORG_TYPE,TOTAL_ORG_NUM,TOTAL_FREE_SHARES,TOTAL_MARKET_CAP,TOTAL_SHARES_RATIO,CHANGE_RATIO,IS_COMPLETE", + filters=generate_filters(code=code, report_date=report_date, org_type=org_type), + ) + + +def get_free_holders(code, end_date): + return get_em_data( + request_type="RPT_F10_EH_FREEHOLDERS", + fields="SECUCODE,END_DATE,HOLDER_NAME,HOLDER_CODE,HOLDER_CODE_OLD,HOLD_NUM,FREE_HOLDNUM_RATIO,FREE_RATIO_QOQ,IS_HOLDORG,HOLDER_RANK", + filters=generate_filters(code=code, end_date=end_date), + sort_by="HOLDER_RANK", + ) + + +def get_top_ten_free_holder_stats(code): + datas = get_holder_report_dates(code=code) + if datas: + 
end_date = to_time_str(datas[0]["END_DATE"]) + holders = get_em_data( + request_type="RPT_F10_FREE_TOTALHOLDNUM", + fields="SECUCODE,SECURITY_CODE,END_DATE,HOLD_NUM_COUNT,HOLD_RATIO_COUNT,HOLD_RATIO_CHANGE,", + filters=generate_filters(code=code, end_date=end_date), + ) + if holders: + holder = holders[0] + ratio = 0 + change = 0 + try: + if holder["HOLD_RATIO_COUNT"]: + ratio = holder["HOLD_RATIO_COUNT"] / 100 + if holder["HOLD_RATIO_CHANGE"]: + change = holder["HOLD_RATIO_CHANGE"] / 100 + except Exception as e: + logger.warning(f"Wrong holder {holder}", e) + + return { + "code": code, + "timestamp": end_date, + "ratio": ratio, + "change": change, + } + + +def get_controlling_shareholder(code): + holders = get_em_data( + request_type="RPT_F10_EH_RELATION", + fields="SECUCODE,HOLDER_NAME,RELATED_RELATION,HOLD_RATIO", + filters=generate_filters(code=code), + ) + + if holders: + control = {"ratio": 0} + + for holder in holders: + if holder["RELATED_RELATION"] == "控股股东": + control["holder"] = holder["HOLDER_NAME"] + elif holder["RELATED_RELATION"] == "实际控制人": + control["parent"] = holder["HOLDER_NAME"] + if holder["HOLD_RATIO"]: + control["ratio"] = control["ratio"] + holder["HOLD_RATIO"] + return control + + +def get_holders(code, end_date): + return get_em_data( + request_type="RPT_F10_EH_HOLDERS", + fields="SECUCODE,END_DATE,HOLDER_NAME,HOLDER_CODE,HOLDER_CODE_OLD,HOLD_NUM,HOLD_NUM_RATIO,HOLD_RATIO_QOQ,HOLDER_RANK,IS_HOLDORG", + filters=generate_filters(code=code, end_date=end_date), + sort_by="HOLDER_RANK", + ) + + +def _order_param(order: str): + if order: + orders = order.split(",") + return ",".join(["1" if item == "asc" else "-1" for item in orders]) + return order + + +def get_url(type, sty, source="SECURITIES", filters=None, order_by="", order="asc", pn=1, ps=2000, params=None): + # 根据 url 映射如下 + # type=RPT_F10_MAIN_ORGHOLDDETAILS + # 
sty=SECURITY_CODE,SECUCODE,REPORT_DATE,ORG_TYPE,TOTAL_ORG_NUM,TOTAL_FREE_SHARES,TOTAL_MARKET_CAP,TOTAL_SHARES_RATIO,CHANGE_RATIO,IS_COMPLETE + # filter=(SECUCODE="000338.SZ")(REPORT_DATE=\'2021-03-31\')(ORG_TYPE="01") + # sr=1 + # st= + sr = _order_param(order=order) + v = random.randint(1000000000000000, 9000000000000000) + + if filters or source: + url = f"https://datacenter.eastmoney.com/securities/api/data/get?type={type}&sty={sty}&filter={filters}&client=APP&source={source}&p={pn}&ps={ps}&sr={sr}&st={order_by}&v=0{v}" + else: + url = f"https://datacenter.eastmoney.com/api/data/get?type={type}&sty={sty}&st={order_by}&sr={sr}&p={pn}&ps={ps}&_={now_timestamp()}" + + if params: + url = url + f"¶ms={params}" + + return url + + +def get_exchange(code): + code_ = int(code) + if 800000 >= code_ >= 600000: + return "SH" + elif code_ >= 400000: + return "BJ" + else: + return "SZ" + + +def actor_type_to_org_type(actor_type: ActorType): + if actor_type == ActorType.raised_fund: + return "01" + if actor_type == ActorType.qfii: + return "02" + if actor_type == ActorType.social_security: + return "03" + if actor_type == ActorType.broker: + return "04" + if actor_type == ActorType.insurance: + return "05" + if actor_type == ActorType.trust: + return "06" + if actor_type == ActorType.corporation: + return "07" + assert False + + +def generate_filters(code=None, trade_date=None, report_date=None, end_date=None, org_type=None, field_op: dict = None): + args = [item for item in locals().items() if item[1] and (item[0] not in ("code", "org_type", "field_op"))] + + result = "" + if code: + result += f'(SECUCODE="{code}.{get_exchange(code)}")' + if org_type: + result += f'(ORG_TYPE="{org_type}")' + + for arg in args: + field = arg[0] + value = arg[1] + if field_op: + op = field_op.get(field, "=") + else: + op = "=" + result += f"({field.upper()}{op}'{value}')" + + return result + + +def get_em_data( + request_type, + fields, + session=None, + source="SECURITIES", + filters=None, + 
sort_by="", + sort="asc", + pn=1, + ps=2000, + fetch_all=True, + fetch_count=1, + params=None, +): + url = get_url( + type=request_type, + sty=fields, + source=source, + filters=filters, + order_by=sort_by, + order=sort, + pn=pn, + ps=ps, + params=params, + ) + logger.debug(f"current url: {url}") + if session: + resp = session.get(url) + else: + resp = requests.get(url) + if resp.status_code == 200: + json_result = resp.json() + resp.close() + + if json_result: + if json_result.get("result"): + data: list = json_result["result"]["data"] + need_next = pn < json_result["result"]["pages"] + elif json_result.get("data"): + data: list = json_result["data"] + need_next = json_result["hasNext"] == 1 + else: + data = [] + need_next = False + if fetch_all or fetch_count - 1 > 0: + if need_next: + next_data = get_em_data( + session=session, + request_type=request_type, + fields=fields, + source=source, + filters=filters, + sort_by=sort_by, + sort=sort, + pn=pn + 1, + ps=ps, + fetch_all=fetch_all, + fetch_count=fetch_count - 1, + params=params, + ) + if next_data: + data = data + next_data + return data + else: + return data + else: + return data + else: + return data + return None + raise RuntimeError(f"request em data code: {resp.status_code}, error: {resp.text}") + + +def get_quotes(): + { + # 市场,2 A股, 3 港股 + "f1": 2, + # 最新价 660/100=6.6 + "f2": 660, + # 涨幅 2000/10000=20% + "f3": 2000, + # 涨跌额 110/100=1.1 + "f4": 110, + # 总手 + "f5": 112596, + # 成交额 + "f6": 74313472.2, + # 换手率 239/10000 + "f8": 239, + # 市盈率 110 + "f9": 11000, + # code + "f12": "300175", + # + "f13": 0, + # name + "f14": "朗源股份", + "f18": 550, + "f19": 80, + "f30": -215, + # 买入价 + "f31": 660, + # 卖出价 + "f32": None, + "f125": 0, + "f139": 5, + "f148": 1, + "f152": 2, + } + { + "f1": 2, + "f2": 1515, + "f3": 1002, + "f4": 138, + "f5": 547165, + "f6": 804705199.0, + "f8": 241, + "f9": 1575, + "f12": "601233", + "f13": 1, + "f14": "桐昆股份", + "f18": 1377, + "f19": 2, + "f30": -1281, + # 买入价 + "f31": 1515, + # 卖出价 + 
"f32": None, + "f125": 0, + "f139": 2, + "f148": 577, + "f152": 2, + } + { + "f1": 2, + "f2": 611, + "f3": 338, + "f4": 20, + "f5": 478746, + "f6": 293801314.14, + "f8": 803, + "f9": 2067, + "f12": "000788", + "f13": 0, + "f14": "北大医药", + "f18": 591, + "f19": 6, + "f30": -4015, + "f31": 611, + "f32": 612, + "f125": 0, + "f139": 2, + "f148": 1, + "f152": 2, + } + + +# quote +# url = 'https://push2his.eastmoney.com/api/qt/stock/kline/get?' +# 日线 klt=101 +# 周线 klt=102 +# 月线 klt=103 +# +# limit lmt=2000 +# +# 结束时间 end=20500000 +# +# 复权 fqt 0 不复权 1 前复权 2 后复权 +# iscca +# +# 字段 +# f51,f52,f53,f54,f55, +# timestamp,open,close,high,low +# f56,f57,f58,f59,f60,f61,f62,f63,f64 +# volume,turnover,震幅,change_pct,change,turnover_rate +# 深圳 +# secid=0.399001&klt=101&fqt=1&lmt=66&end=20500000&iscca=1&fields1=f1,f2,f3,f4,f5,f6,f7,f8&fields2=f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f62,f63,f64&ut=f057cbcbce2a86e2866ab8877db1d059&forcect=1 +# secid=0.399001&klt=102&fqt=1&lmt=66&end=20500000&iscca=1&fields1=f1,f2,f3,f4,f5,f6,f7,f8&fields2=f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f62,f63,f64&ut=f057cbcbce2a86e2866ab8877db1d059&forcect=1 +# secid=0.000338&klt=101&fqt=1&lmt=66&end=20500000&iscca=1&fields1=f1,f2,f3,f4,f5,f6,f7,f8&fields2=f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f62,f63,f64&ut=f057cbcbce2a86e2866ab8877db1d059&forcect=1 +# +# 港股 +# secid=116.01024&klt=102&fqt=1&lmt=66&end=20500000&iscca=1&fields1=f1,f2,f3,f4,f5,f6,f7,f8&fields2=f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f62,f63,f64&ut=f057cbcbce2a86e2866ab8877db1d059&forcect=1 +# 美股 +# secid=106.BABA&klt=102&fqt=1&lmt=66&end=20500000&iscca=1&fields1=f1,f2,f3,f4,f5,f6,f7,f8&fields2=f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f62,f63,f64&ut=f057cbcbce2a86e2866ab8877db1d059&forcect=1 +# +# 上海 +# secid=1.512660&klt=101&fqt=1&lmt=66&end=20500000&iscca=1&fields1=f1,f2,f3,f4,f5,f6,f7,f8&fields2=f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f62,f63,f64&ut=f057cbcbce2a86e2866ab8877db1d059&forcect=1 +def get_kdata(entity_id, 
session=None, level=IntervalLevel.LEVEL_1DAY, adjust_type=AdjustType.qfq, limit=10000): + entity_type, exchange, code = decode_entity_id(entity_id) + level = IntervalLevel(level) + + sec_id = to_em_sec_id(entity_id) + fq_flag = to_em_fq_flag(adjust_type) + level_flag = to_em_level_flag(level) + # f131 结算价 + # f133 持仓 + # 目前未获取 + url = f"https://push2his.eastmoney.com/api/qt/stock/kline/get?secid={sec_id}&klt={level_flag}&fqt={fq_flag}&lmt={limit}&end=20500000&iscca=1&fields1=f1,f2,f3,f4,f5,f6,f7,f8&fields2=f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f62,f63,f64&ut=f057cbcbce2a86e2866ab8877db1d059&forcect=1" + + if session: + resp = session.get(url, headers=DEFAULT_HEADER) + else: + resp = requests.get(url, headers=DEFAULT_HEADER) + resp.raise_for_status() + results = resp.json() + resp.close() + data = results["data"] + + kdatas = [] + + if data: + klines = data["klines"] + name = data["name"] + + for result in klines: + # "2000-01-28,1005.26,1012.56,1173.12,982.13,3023326,3075552000.00" + # "2021-08-27,19.39,20.30,20.30,19.25,1688497,3370240912.00,5.48,6.01,1.15,3.98,0,0,0" + # time,open,close,high,low,volume,turnover + # "2022-04-13,10708,10664,10790,10638,402712,43124771328,1.43,0.57,60,0.00,4667112399583576064,4690067230254170112,1169270784" + fields = result.split(",") + the_timestamp = to_pd_timestamp(fields[0]) + + the_id = generate_kdata_id(entity_id=entity_id, timestamp=the_timestamp, level=level) + + open = to_float(fields[1]) + close = to_float(fields[2]) + high = to_float(fields[3]) + low = to_float(fields[4]) + volume = to_float(fields[5]) + turnover = to_float(fields[6]) + # 7 振幅 + change_pct = value_to_pct(to_float(fields[8])) + # 9 变动 + turnover_rate = value_to_pct(to_float(fields[10])) + + kdatas.append( + dict( + id=the_id, + timestamp=the_timestamp, + entity_id=entity_id, + provider="em", + code=code, + name=name, + level=level.value, + open=open, + close=close, + high=high, + low=low, + volume=volume, + turnover=turnover, + 
turnover_rate=turnover_rate, + change_pct=change_pct, + ) + ) + if kdatas: + df = pd.DataFrame.from_records(kdatas) + return df + + +def get_basic_info(entity_id): + entity_type, exchange, code = decode_entity_id(entity_id) + if entity_type == "stock": + url = "https://emh5.eastmoney.com/api/GongSiGaiKuang/GetJiBenZiLiao" + result_field = "JiBenZiLiao" + elif entity_type == "stockus": + url = "https://emh5.eastmoney.com/api/MeiGu/GaiKuang/GetZhengQuanZiLiao" + result_field = "ZhengQuanZiLiao" + elif entity_type == "stockhk": + url = "https://emh5.eastmoney.com/api/GangGu/GaiKuang/GetZhengQuanZiLiao" + result_field = "ZhengQuanZiLiao" + else: + assert False + + data = {"fc": to_em_fc(entity_id=entity_id), "color": "w"} + resp = requests.post(url=url, json=data, headers=DEFAULT_HEADER) + + resp.raise_for_status() + resp.close() + + return resp.json()["Result"][result_field] + + +def get_future_list(): + # 主连 + url = f"https://futsseapi.eastmoney.com/list/filter/2?fid=sp_all&mktid=0&typeid=0&pageSize=1000&pageIndex=0&callbackName=jQuery34106875017735118845_1649736551642&sort=asc&orderBy=idx&_={now_timestamp()}" + resp = requests.get(url, headers=DEFAULT_HEADER) + resp.raise_for_status() + result = json_callback_param(resp.text) + resp.close() + # [['DCE', 'im'], ['SHFE', 'rbm'], ['SHFE', 'hcm'], ['SHFE', 'ssm'], ['CZCE', 'SFM'], ['CZCE', 'SMM'], ['SHFE', 'wrm'], ['SHFE', 'cum'], ['SHFE', 'alm'], ['SHFE', 'znm'], ['SHFE', 'pbm'], ['SHFE', 'nim'], ['SHFE', 'snm'], ['INE', 'bcm'], ['SHFE', 'aum'], ['SHFE', 'agm'], ['DCE', 'am'], ['DCE', 'bm'], ['DCE', 'ym'], ['DCE', 'mm'], ['CZCE', 'RSM'], ['CZCE', 'OIM'], ['CZCE', 'RMM'], ['DCE', 'pm'], ['DCE', 'cm'], ['DCE', 'csm'], ['DCE', 'jdm'], ['CZCE', 'CFM'], ['CZCE', 'CYM'], ['CZCE', 'SRM'], ['CZCE', 'APM'], ['CZCE', 'CJM'], ['CZCE', 'PKM'], ['CZCE', 'PMM'], ['CZCE', 'WHM'], ['DCE', 'rrm'], ['CZCE', 'JRM'], ['CZCE', 'RIM'], ['CZCE', 'LRM'], ['DCE', 'lhm'], ['INE', 'scm'], ['SHFE', 'fum'], ['DCE', 'pgm'], ['INE', 'lum'], ['SHFE', 
'bum'], ['CZCE', 'MAM'], ['DCE', 'egm'], ['DCE', 'lm'], ['CZCE', 'TAM'], ['DCE', 'vm'], ['DCE', 'ppm'], ['DCE', 'ebm'], ['CZCE', 'SAM'], ['CZCE', 'FGM'], ['CZCE', 'URM'], ['SHFE', 'rum'], ['INE', 'nrm'], ['SHFE', 'spm'], ['DCE', 'fbm'], ['DCE', 'bbm'], ['CZCE', 'PFM'], ['DCE', 'jmm'], ['DCE', 'jm'], ['CZCE', 'ZCM'], ['8', '060120'], ['8', '040120'], ['8', '070120'], ['8', '110120'], ['8', '050120'], ['8', '130120']] + futures = [] + for item in result["list"]: + entity = {} + entity["exchange"], entity["code"] = item["uid"].split("|") + + # {'8', 'CZCE', 'DCE', 'INE', 'SHFE'} + if entity["exchange"] == "8": + entity["exchange"] = "cffex" + entity["code"] = to_zvt_code(entity["code"]) + else: + try: + entity["exchange"] = Exchange(entity["exchange"].lower()).value + if entity["code"][-1].lower() == "m": + entity["code"] = entity["code"][:-1] + else: + assert False + entity["code"] = entity["code"].upper() + except Exception as e: + logger.error(f"wrong item: {item}", e) + continue + + entity["entity_type"] = "future" + entity["name"] = item["name"] + entity["id"] = f"future_{entity['exchange']}_{entity['code']}" + entity["entity_id"] = entity["id"] + futures.append(entity) + df = pd.DataFrame.from_records(data=futures) + return df + + +def _calculate_limit(row): + code = row["code"] + change_pct = row["change_pct"] + if code.startswith(("83", "87", "88", "889", "82", "920")): + return change_pct >= 0.29, change_pct <= -0.29 + elif code.startswith("300") or code.startswith("301") or code.startswith("688"): + return change_pct >= 0.19, change_pct <= -0.19 + else: + return change_pct > 0.09, change_pct < -0.09 + + +def get_stock_turnover(): + sz_url = "https://push2his.eastmoney.com/api/qt/stock/trends2/get?fields1=f1,f2&fields2=f51,f57&ut=fa5fd1943c7b386f172d6893dbfba10b&iscr=0&iscca=0&secid=0.399001&time=0&ndays=2" + resp = requests.get(sz_url, headers=DEFAULT_HEADER) + + resp.raise_for_status() + + data = resp.json()["data"]["trends"] + resp.close() + return data + 
def get_top_tradable_list(entity_type, fields, limit, entity_flag, exchange=None, return_quote=False):
    """Fetch a ranked entity list from em's clist endpoint.

    :param return_quote: when True, return quote columns (price/volume/limit flags);
        otherwise return entity-meta columns (id, code, name, ...)
    :return: DataFrame
    """
    url = f"https://push2.eastmoney.com/api/qt/clist/get?np=1&fltt=2&invt=2&fields={fields}&pn=1&pz={limit}&fid=f3&po=1&{entity_flag}&ut=f057cbcbce2a86e2866ab8877db1d059&forcect=1&cb=cbCallbackMore&&callback=jQuery34109676853980006124_{now_timestamp() - 1}&_={now_timestamp()}"
    resp = requests.get(url, headers=DEFAULT_HEADER)
    resp.raise_for_status()

    result = json_callback_param(resp.text)
    resp.close()
    df = pd.DataFrame.from_records(data=result["data"]["diff"])

    if return_quote:
        df = df[["f12", "f14", "f2", "f3", "f5", "f8", "f6", "f15", "f16", "f17", "f20", "f21"]]
        df.columns = [
            "code",
            "name",
            "price",
            "change_pct",
            "volume",
            "turnover_rate",
            "turnover",
            "high",
            "low",
            "open",
            "total_cap",
            "float_cap",
        ]

        # the endpoint marks missing numbers with "-"
        df = df.dropna()
        df = df[df.change_pct != "-"]
        df = df[df.turnover_rate != "-"]
        df = df[df.turnover != "-"]

        df = df.astype({"change_pct": "float", "turnover_rate": "float", "turnover": "float", "volume": "float"})

        # normalize units: pct fields come scaled by 100, volume in lots of 100
        df["change_pct"] = df["change_pct"] / 100
        df["turnover_rate"] = df["turnover_rate"] / 100
        df["volume"] = df["volume"] * 100

        df[["is_limit_up", "is_limit_down"]] = df.apply(lambda row: _calculate_limit(row), axis=1, result_type="expand")
    else:
        if entity_type == TradableType.stock:
            df = df[["f12", "f13", "f14", "f20", "f21", "f9", "f23"]]
            df.columns = ["code", "exchange", "name", "cap", "cap1", "pe", "pb"]
            df[["cap", "cap1", "pe", "pb"]] = df[["cap", "cap1", "pe", "pb"]].apply(pd.to_numeric, errors="coerce")
        else:
            df = df[["f12", "f13", "f14"]]
            df.columns = ["code", "exchange", "name"]
        if exchange:
            df["exchange"] = exchange.value
        df["entity_type"] = entity_type.value
        df["id"] = df[["entity_type", "exchange", "code"]].apply(lambda x: "_".join(x.astype(str)), axis=1)
        df["entity_id"] = df["id"]

    return df


def get_top_stocks(limit=100):
    """Top A-share stocks (SH/SZ main boards, ChiNext, STAR, BSE) with quotes."""
    flag = "fs=m:0+t:6+f:!2,m:0+t:13+f:!2,m:0+t:80+f:!2,m:1+t:2+f:!2,m:1+t:23+f:!2,m:0+t:81+s:2048"
    quote_fields = "f2,f3,f5,f6,f8,f12,f14,f15,f16,f17,f20,f21"
    return get_top_tradable_list(
        entity_type=TradableType.stock, fields=quote_fields, limit=limit, entity_flag=flag, return_quote=True
    )


def get_top_stockhks(limit=20):
    """Top southbound (HK Connect) stocks with quotes."""
    flag = "fs=b:DLMK0144,b:DLMK0146"
    quote_fields = "f2,f3,f5,f6,f8,f12,f14,f15,f16,f17,f20,f21"
    return get_top_tradable_list(
        entity_type=TradableType.stockhk, fields=quote_fields, limit=limit, entity_flag=flag, return_quote=True
    )


def get_tradable_list(
    entity_type: Union[TradableType, str] = "stock",
    exchange: Union[Exchange, str] = None,
    limit: int = 10000,
    hk_south=False,
    block_category=BlockCategory.concept,
):
    """List tradable entities of a type across its exchanges as one DataFrame.

    :param exchange: restrict to one exchange (must belong to the entity type)
    :param hk_south: for HK stocks, only those tradable via the southbound connect
    :param block_category: for blocks on Exchange.cn, industry vs concept
    """
    entity_type = TradableType(entity_type)
    if entity_type == TradableType.future:
        return get_future_list()

    exchanges = get_entity_exchanges(entity_type=entity_type)
    if exchange is not None:
        assert exchange in exchanges
        exchanges = [exchange]

    dfs = []
    for exchange in exchanges:
        exchange = Exchange(exchange)
        ex_flag = to_em_entity_flag(exchange=exchange)
        # "m" = market, "t" = trade/board type in the fs query
        entity_flag = f"fs=m:{ex_flag}"

        if entity_type == TradableType.index:
            if exchange == Exchange.sh:
                entity_flag = "fs=i:1.000001,i:1.000002,i:1.000003,i:1.000009,i:1.000010,i:1.000011,i:1.000012,i:1.000016,i:1.000300,i:1.000903,i:1.000905,i:1.000906,i:1.000688,i:1.000852,i:2.932000"
            if exchange == Exchange.sz:
                entity_flag = "fs=i:0.399001,i:0.399002,i:0.399003,i:0.399004,i:0.399005,i:0.399006,i:0.399100,i:0.399106,i:0.399305,i:0.399550"
        elif entity_type == TradableType.currency:
            entity_flag = "fs=m:119,m:120"
        elif entity_type == TradableType.indexus:
            # Nasdaq 100, Dow Jones, S&P 500, dollar index
            entity_flag = "fs=i:100.NDX,i:100.DJIA,i:100.SPX,i:100.UDI"
        elif entity_type == TradableType.cbond:
            if exchange == Exchange.sz:
                entity_flag = "fs=m:0+e:11"
            elif exchange == Exchange.sh:
                entity_flag = "fs=m:1+e:11"
            else:
                assert False
        elif entity_type in [TradableType.block, TradableType.stock, TradableType.stockus, TradableType.stockhk]:
            if exchange == Exchange.sh:
                # t=2 main board, t=23 STAR market
                entity_flag = "fs=m:1+t:2,m:1+t:23"
            if exchange == Exchange.sz:
                # t=6 main board, t=80 ChiNext
                entity_flag = "fs=m:0+t:6,m:0+t:13,m:0+t:80"
            if exchange == Exchange.bj:
                entity_flag = "fs=m:0+t:81+s:2048"
            if exchange == Exchange.hk:
                if hk_south:
                    # HK Connect (southbound) universe
                    entity_flag = "fs=b:DLMK0144,b:DLMK0146"
                else:
                    # t=3 main board, t=4 GEM
                    entity_flag = "fs=m:116+t:3,m:116+t:4"
            if exchange == Exchange.nasdaq:
                # t=1; t=3 China concept stocks
                entity_flag = "fs=m:105+t:1,m:105+t:3"
            if exchange == Exchange.nyse:
                # t=1; t=3 China concept stocks
                # NOTE(review): "m:105" here looks like a copy of the nasdaq flag —
                # confirm whether NYSE should use m:106 for both segments
                entity_flag = "fs=m:106+t:1,m:105+t:3"
            if exchange == Exchange.cn:
                if block_category == BlockCategory.industry:
                    entity_flag = entity_flag + "+t:2"
                elif block_category == BlockCategory.concept:
                    entity_flag = entity_flag + "+t:3"
                else:
                    assert False

        meta_fields = "f1,f2,f3,f4,f12,f13,f14"
        if entity_type in (TradableType.stock, TradableType.stockhk):
            # plus market cap, float cap, pe, pb
            meta_fields = meta_fields + ",f20,f21,f9,f23"

        df = get_top_tradable_list(
            entity_type=entity_type, fields=meta_fields, limit=limit, entity_flag=entity_flag, exchange=exchange
        )
        if entity_type == TradableType.block:
            df["category"] = block_category.value

        dfs.append(df)

    return pd.concat(dfs)
session: + resp = session.get(category_stocks_url, headers=DEFAULT_HEADER) + else: + resp = requests.get(category_stocks_url, headers=DEFAULT_HEADER) + + data = json_callback_param(resp.text)["data"] + the_list = [] + if data: + results = data["diff"] + for result in results: + stock_code = result["f12"] + stock_name = result["f14"] + stock_id = china_stock_code_to_id(stock_code) + + the_list.append( + { + "id": "{}_{}".format(block_id, stock_id), + "entity_id": block_id, + "entity_type": "block", + "exchange": exchange, + "code": code, + "name": name, + "timestamp": current_date(), + "stock_id": stock_id, + "stock_code": stock_code, + "stock_name": stock_name, + } + ) + return the_list + + +def market_code_to_entity_id(market, code): + if market in (0, 1): + return china_stock_code_to_id(code) + elif market == 105: + return f"stockus_nasdaq_{code}" + elif market == 106: + return f"stockus_nyse_{code}" + elif market == 116: + return f"stockhk_hk_{code}" + else: + for exchange, flag in exchange_map_em_flag.items(): + if flag == market: + for entity_type, exchanges in tradable_type_map_exchanges.items(): + if exchange in exchanges: + return f"{entity_type.value}_{exchange.value}_{code}" + return code + + +def get_hot_topic(session: Session = None): + url = "https://emcreative.eastmoney.com/FortuneApi/GuBaApi/common" + data = { + "url": "newctopic/api/Topic/HomeTopicRead?deviceid=IPHONE&version=10001000&product=Guba&plat=Iphone&p=1&ps=20&needPkPost=true", + "type": "get", + "parm": "", + } + logger.debug(f"get hot topic from: {url}") + if session: + resp = session.post(url=url, json=data, headers=DEFAULT_HEADER) + else: + resp = requests.post(url=url, json=data, headers=DEFAULT_HEADER) + + if resp.status_code == 200: + data_list = resp.json().get("re") + if data_list: + hot_topics = [] + for position, data in enumerate(data_list): + if data["stockList"]: + entity_ids = [ + market_code_to_entity_id(market=stock["qMarket"], code=stock["qCode"]) + for stock in 
data["stockList"] + ] + else: + entity_ids = [] + topic_id = data["topicid"] + entity_id = f"hot_topic_{topic_id}" + hot_topics.append( + { + "id": entity_id, + "entity_id": entity_id, + "timestamp": now_pd_timestamp(), + "created_timestamp": to_pd_timestamp(data["cTime"]), + "position": position, + "entity_ids": entity_ids, + "news_code": topic_id, + "news_title": data["name"], + "news_content": data["summary"], + } + ) + return hot_topics + + logger.error(f"request em data code: {resp.status_code}, error: {resp.text}") + + +def record_hot_topic(): + hot_topics = get_hot_topic() + logger.debug(hot_topics) + if hot_topics: + df = pd.DataFrame.from_records(hot_topics) + df_to_db( + df=df, data_schema=StockHotTopic, provider="em", force_update=True, dtype={"entity_ids": sqlalchemy.JSON} + ) + + +def get_news(entity_id, ps=200, index=1, start_timestamp=None, session=None, latest_code=None): + sec_id = to_em_sec_id(entity_id=entity_id) + url = f"https://np-listapi.eastmoney.com/comm/wap/getListInfo?cb=callback&client=wap&type=1&mTypeAndCode={sec_id}&pageSize={ps}&pageIndex={index}&callback=jQuery1830017478247906740352_{now_timestamp() - 1}&_={now_timestamp()}" + logger.debug(f"get news from: {url}") + if session: + resp = session.get(url) + else: + resp = requests.get(url) + # { + # "Art_ShowTime": "2022-02-11 14:29:25", + # "Art_Image": "", + # "Art_MediaName": "每日经济新闻", + # "Art_Code": "202202112274017262", + # "Art_Title": "潍柴动力:巴拉德和锡里斯不纳入合并财务报表范围", + # "Art_SortStart": "1644560965017262", + # "Art_VideoCount": 0, + # "Art_OriginUrl": "http://finance.eastmoney.com/news/1354,202202112274017262.html", + # "Art_Url": "http://finance.eastmoney.com/a/202202112274017262.html", + # } + if resp.status_code == 200: + json_text = resp.text[resp.text.index("(") + 1 : resp.text.rindex(")")] + if "list" in demjson3.decode(json_text)["data"]: + json_result = demjson3.decode(json_text)["data"]["list"] + resp.close() + if json_result: + news = [ + { + "id": 
f'{entity_id}_{item.get("Art_ShowTime", "")}', + "entity_id": entity_id, + "timestamp": to_pd_timestamp(item.get("Art_ShowTime", "")), + "news_code": item.get("Art_Code", ""), + "news_url": item.get("Art_Url", ""), + "news_title": item.get("Art_Title", ""), + "ignore_by_user": False, + } + for index, item in enumerate(json_result) + if not start_timestamp + or ( + (to_pd_timestamp(item["Art_ShowTime"]) >= start_timestamp) + and (item.get("Art_Code", "") != latest_code) + ) + ] + if len(news) < len(json_result): + return news + next_data = get_news(entity_id=entity_id, ps=ps, index=index + 1) + if next_data: + return news + next_data + else: + return news + else: + return None + + logger.error(f"request em data code: {resp.status_code}, error: {resp.text}") + + +# utils to transform zvt entity to em entity +def to_em_fc(entity_id): + entity_type, exchange, code = decode_entity_id(entity_id) + if entity_type == "stock": + if exchange == "sh": + return f"{code}01" + if exchange == "sz": + return f"{code}02" + + if entity_type == "stockhk": + return code + + if entity_type == "stockus": + if exchange == "nyse": + return f"{code}.N" + if exchange == "nasdaq": + return f"{code}.O" + + +exchange_map_em_flag = { + #: 深证交易所 + Exchange.sz: 0, + #: 上证交易所 + Exchange.sh: 1, + #: 北交所 + Exchange.bj: 0, + #: 纳斯达克 + Exchange.nasdaq: 105, + #: 纽交所 + Exchange.nyse: 106, + #: 中国金融期货交易所 + Exchange.cffex: 8, + #: 上海期货交易所 + Exchange.shfe: 113, + #: 大连商品交易所 + Exchange.dce: 114, + #: 郑州商品交易所 + Exchange.czce: 115, + #: 上海国际能源交易中心 + Exchange.ine: 142, + #: 港交所 + Exchange.hk: 116, + #: 中国行业/概念板块 + Exchange.cn: 90, + #: 美国指数 + Exchange.us: 100, + #: 汇率 + Exchange.forex: 119, +} + + +def to_em_entity_flag(exchange: Union[Exchange, str]): + exchange = Exchange(exchange) + return exchange_map_em_flag.get(exchange, exchange) + + +def to_em_fq_flag(adjust_type: AdjustType): + adjust_type = AdjustType(adjust_type) + if adjust_type == AdjustType.bfq: + return 0 + if adjust_type == AdjustType.qfq: + 
return 1 + if adjust_type == AdjustType.hfq: + return 2 + + +def to_em_level_flag(level: IntervalLevel): + level = IntervalLevel(level) + if level == IntervalLevel.LEVEL_1MIN: + return 1 + elif level == IntervalLevel.LEVEL_5MIN: + return 5 + elif level == IntervalLevel.LEVEL_15MIN: + return 15 + elif level == IntervalLevel.LEVEL_30MIN: + return 30 + elif level == IntervalLevel.LEVEL_1HOUR: + return 60 + elif level == IntervalLevel.LEVEL_1DAY: + return 101 + elif level == IntervalLevel.LEVEL_1WEEK: + return 102 + elif level == IntervalLevel.LEVEL_1MON: + return 103 + + assert False + + +def to_em_sec_id(entity_id): + entity_type, exchange, code = decode_entity_id(entity_id) + # 主力合约 + if entity_type == "future" and code[-1].isalpha(): + code = code + "m" + if entity_type == "currency" and "CNYC" in code: + return f"120.{code}" + return f"{to_em_entity_flag(exchange)}.{code}" + + +def to_zvt_code(code): + # ('中证当月连续', '8|060120'), + # ('沪深当月连续', '8|040120'), + # ('上证当月连续', '8|070120'), + # ('十债当季连续', '8|110120'), + # ('五债当季连续', '8|050120'), + # ('二债当季连续', '8|130120')] + if code == "060120": + return "IC" + elif code == "040120": + return "IF" + elif code == "070120": + return "IH" + elif code == "110120": + return "T" + elif code == "050120": + return "TF" + elif code == "130120": + return "TS" + return code + + +if __name__ == "__main__": + # from pprint import pprint + # pprint(get_free_holder_report_dates(code='000338')) + # pprint(get_holder_report_dates(code='000338')) + # pprint(get_holders(code='000338', end_date='2021-03-31')) + # pprint(get_free_holders(code='000338', end_date='2021-03-31')) + # pprint(get_ii_holder(code='000338', report_date='2021-03-31', + # org_type=actor_type_to_org_type(ActorType.corporation))) + # print( + # get_ii_summary(code="600519", report_date="2021-03-31", org_type=actor_type_to_org_type(ActorType.corporation)) + # ) + # df = get_kdata(entity_id="index_sz_399370", level="1wk") + # df = get_tradable_list(entity_type="cbond") + # 
if __name__ == "__main__":
    # Manual smoke-test playground; uncomment the call you want to exercise.
    # from pprint import pprint
    # pprint(get_free_holder_report_dates(code="000338"))
    # pprint(get_holder_report_dates(code="000338"))
    # pprint(get_holders(code="000338", end_date="2021-03-31"))
    # pprint(get_free_holders(code="000338", end_date="2021-03-31"))
    # pprint(get_ii_holder(code="000338", report_date="2021-03-31",
    #                      org_type=actor_type_to_org_type(ActorType.corporation)))
    # print(get_ii_summary(code="600519", report_date="2021-03-31",
    #                      org_type=actor_type_to_org_type(ActorType.corporation)))
    # df = get_kdata(entity_id="index_sz_399370", level="1wk")
    # df = get_tradable_list(entity_type="cbond")
    # df = get_news("stock_sz_300999", ps=1)
    # df = get_tradable_list(entity_type="block")
    # df = get_tradable_list(entity_type="indexus")
    # df = get_tradable_list(entity_type="currency")
    # df = get_tradable_list(entity_type="index")
    # df = get_kdata(entity_id="index_us_SPX", level="1d")
    # df = get_treasury_yield(pn=1, ps=50, fetch_all=False)
    # df = get_future_list()
    # df = get_kdata(entity_id="future_dce_I", level="1d")
    # df = get_dragon_and_tiger(code="000989", start_date="2018-10-31")
    # df = get_dragon_and_tiger_list(start_date="2022-04-25")
    # df_delist = df[df["name"].str.contains("退")]
    # print(df_delist[["id", "name"]].values.tolist())
    # print(get_block_stocks(block_id="block_cn_BK1144"))
    # df = get_kdata(entity_id="stock_bj_873693", level="1d")
    # print(get_controlling_shareholder(code="000338"))
    # print(get_hot_topic())
    # record_hot_topic()
    # df = StockHotTopic.query_data(
    #     filters=[func.json_extract(StockHotTopic.entity_ids, "$").contains("stock_sh_600809")],
    # )
    # print(get_top_stocks(limit=10))
    # print(get_top_stockhks(limit=10))
    # print(get_top_ten_free_holder_stats(code="000338"))
    print(get_stock_turnover())
"get_exchange", + "actor_type_to_org_type", + "generate_filters", + "get_em_data", + "get_quotes", + "get_kdata", + "get_basic_info", + "get_future_list", + "get_top_tradable_list", + "get_top_stocks", + "get_top_stockhks", + "get_tradable_list", + "get_block_stocks", + "market_code_to_entity_id", + "get_hot_topic", + "record_hot_topic", + "get_news", + "to_em_fc", + "to_em_entity_flag", + "to_em_fq_flag", + "to_em_level_flag", + "to_em_sec_id", + "to_zvt_code", +] diff --git a/src/zvt/recorders/em/macro/__init__.py b/src/zvt/recorders/em/macro/__init__.py new file mode 100644 index 00000000..8d0a4ee2 --- /dev/null +++ b/src/zvt/recorders/em/macro/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule em_treasury_yield_recorder +from .em_treasury_yield_recorder import * +from .em_treasury_yield_recorder import __all__ as _em_treasury_yield_recorder_all + +__all__ += _em_treasury_yield_recorder_all diff --git a/src/zvt/recorders/em/macro/em_treasury_yield_recorder.py b/src/zvt/recorders/em/macro/em_treasury_yield_recorder.py new file mode 100644 index 00000000..ea9f9797 --- /dev/null +++ b/src/zvt/recorders/em/macro/em_treasury_yield_recorder.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +import pandas as pd + +from zvt.contract import IntervalLevel +from zvt.contract.api import df_to_db +from zvt.contract.recorder import FixedCycleDataRecorder +from zvt.domain import Country +from zvt.domain.macro.monetary import TreasuryYield +from zvt.recorders.em import em_api + + +class EMTreasuryYieldRecorder(FixedCycleDataRecorder): + entity_schema = Country + data_schema = TreasuryYield + entity_provider = "wb" + provider = "em" + + def __init__( + self, + force_update=True, + sleeping_time=10, + entity_filters=None, + ignore_failed=True, + real_time=False, + 
fix_duplicate_way="ignore", + start_timestamp=None, + end_timestamp=None, + level=IntervalLevel.LEVEL_1DAY, + kdata_use_begin_time=False, + one_day_trading_minutes=24 * 60, + return_unfinished=False, + ) -> None: + super().__init__( + force_update, + sleeping_time, + None, + None, + None, + None, + ["CN"], + True, + entity_filters, + ignore_failed, + real_time, + fix_duplicate_way, + start_timestamp, + end_timestamp, + level, + kdata_use_begin_time, + one_day_trading_minutes, + return_unfinished, + ) + + def record(self, entity, start, end, size, timestamps): + # record before + if start: + result = em_api.get_treasury_yield(pn=1, ps=size, fetch_all=False) + else: + result = em_api.get_treasury_yield(fetch_all=True) + if result: + df = pd.DataFrame.from_records(result) + df_to_db( + data_schema=self.data_schema, + df=df, + provider=self.provider, + force_update=True, + drop_duplicates=True, + ) + + +if __name__ == "__main__": + r = EMTreasuryYieldRecorder() + r.run() + + +# the __all__ is generated +__all__ = ["EMTreasuryYieldRecorder"] diff --git a/src/zvt/recorders/em/meta/__init__.py b/src/zvt/recorders/em/meta/__init__.py new file mode 100644 index 00000000..fde6b2e2 --- /dev/null +++ b/src/zvt/recorders/em/meta/__init__.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule em_cbond_meta_recorder +from .em_cbond_meta_recorder import * +from .em_cbond_meta_recorder import __all__ as _em_cbond_meta_recorder_all + +__all__ += _em_cbond_meta_recorder_all + +# import all from submodule em_block_meta_recorder +from .em_block_meta_recorder import * +from .em_block_meta_recorder import __all__ as _em_block_meta_recorder_all + +__all__ += _em_block_meta_recorder_all + +# import all from submodule em_indexus_meta_recorder +from .em_indexus_meta_recorder import * +from 
.em_indexus_meta_recorder import __all__ as _em_indexus_meta_recorder_all + +__all__ += _em_indexus_meta_recorder_all + +# import all from submodule em_future_meta_recorder +from .em_future_meta_recorder import * +from .em_future_meta_recorder import __all__ as _em_future_meta_recorder_all + +__all__ += _em_future_meta_recorder_all + +# import all from submodule em_stockhk_meta_recorder +from .em_stockhk_meta_recorder import * +from .em_stockhk_meta_recorder import __all__ as _em_stockhk_meta_recorder_all + +__all__ += _em_stockhk_meta_recorder_all + +# import all from submodule em_stockus_meta_recorder +from .em_stockus_meta_recorder import * +from .em_stockus_meta_recorder import __all__ as _em_stockus_meta_recorder_all + +__all__ += _em_stockus_meta_recorder_all + +# import all from submodule em_index_meta_recorder +from .em_index_meta_recorder import * +from .em_index_meta_recorder import __all__ as _em_index_meta_recorder_all + +__all__ += _em_index_meta_recorder_all + +# import all from submodule em_currency_meta_recorder +from .em_currency_meta_recorder import * +from .em_currency_meta_recorder import __all__ as _em_currency_meta_recorder_all + +__all__ += _em_currency_meta_recorder_all + +# import all from submodule em_stock_meta_recorder +from .em_stock_meta_recorder import * +from .em_stock_meta_recorder import __all__ as _em_stock_meta_recorder_all + +__all__ += _em_stock_meta_recorder_all diff --git a/src/zvt/recorders/em/meta/em_block_meta_recorder.py b/src/zvt/recorders/em/meta/em_block_meta_recorder.py new file mode 100644 index 00000000..c7381f3b --- /dev/null +++ b/src/zvt/recorders/em/meta/em_block_meta_recorder.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +import pandas as pd + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder, TimeSeriesDataRecorder +from zvt.domain import Block, BlockCategory, BlockStock +from zvt.recorders.em import em_api + + +class EMBlockRecorder(Recorder): + provider = "em" + data_schema = 
Block + + def run(self): + for block_category in [BlockCategory.concept, BlockCategory.industry]: + df = em_api.get_tradable_list(entity_type="block", block_category=block_category) + self.logger.info(df) + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +class EMBlockStockRecorder(TimeSeriesDataRecorder): + entity_provider = "em" + entity_schema = Block + + provider = "em" + data_schema = BlockStock + + def record(self, entity, start, end, size, timestamps): + the_list = em_api.get_block_stocks(entity.id, entity.name) + if the_list: + df = pd.DataFrame.from_records(the_list) + df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, force_update=True) + self.logger.info("finish recording block:{},{}".format(entity.category, entity.name)) + self.sleep() + + +if __name__ == "__main__": + recorder = EMBlockStockRecorder(day_data=True, sleeping_time=0) + recorder.run() + + +# the __all__ is generated +__all__ = ["EMBlockRecorder", "EMBlockStockRecorder"] diff --git a/src/zvt/recorders/em/meta/em_cbond_meta_recorder.py b/src/zvt/recorders/em/meta/em_cbond_meta_recorder.py new file mode 100644 index 00000000..7982e708 --- /dev/null +++ b/src/zvt/recorders/em/meta/em_cbond_meta_recorder.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder +from zvt.domain.meta.cbond_meta import CBond +from zvt.recorders.em import em_api + + +class EMCBondRecorder(Recorder): + provider = "em" + data_schema = CBond + + def run(self): + df = em_api.get_tradable_list(entity_type="cbond") + self.logger.info(df) + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +if __name__ == "__main__": + recorder = EMCBondRecorder() + recorder.run() + + +# the __all__ is generated +__all__ = ["EMCBondRecorder"] diff --git a/src/zvt/recorders/em/meta/em_currency_meta_recorder.py 
b/src/zvt/recorders/em/meta/em_currency_meta_recorder.py new file mode 100644 index 00000000..f4847e79 --- /dev/null +++ b/src/zvt/recorders/em/meta/em_currency_meta_recorder.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder +from zvt.domain.meta.currency_meta import Currency +from zvt.recorders.em import em_api + + +class EMCurrencyRecorder(Recorder): + provider = "em" + data_schema = Currency + + def run(self): + df = em_api.get_tradable_list(entity_type="currency") + self.logger.info(df) + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +if __name__ == "__main__": + recorder = EMCurrencyRecorder(force_update=True) + recorder.run() + + +# the __all__ is generated +__all__ = ["EMCurrencyRecorder"] diff --git a/src/zvt/recorders/em/meta/em_future_meta_recorder.py b/src/zvt/recorders/em/meta/em_future_meta_recorder.py new file mode 100644 index 00000000..f57f4ec2 --- /dev/null +++ b/src/zvt/recorders/em/meta/em_future_meta_recorder.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder +from zvt.domain import Future +from zvt.recorders.em import em_api + + +class EMFutureRecorder(Recorder): + provider = "em" + data_schema = Future + + def run(self): + df = em_api.get_tradable_list(entity_type="future") + self.logger.info(df) + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +if __name__ == "__main__": + recorder = EMFutureRecorder(force_update=True) + recorder.run() + + +# the __all__ is generated +__all__ = ["EMFutureRecorder"] diff --git a/src/zvt/recorders/em/meta/em_index_meta_recorder.py b/src/zvt/recorders/em/meta/em_index_meta_recorder.py new file mode 100644 index 00000000..9c74265b --- /dev/null +++ b/src/zvt/recorders/em/meta/em_index_meta_recorder.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 
-*- + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder +from zvt.domain import Index +from zvt.recorders.em import em_api + + +class EMIndexRecorder(Recorder): + provider = "em" + data_schema = Index + + def run(self): + df = em_api.get_tradable_list(entity_type="index") + self.logger.info(df) + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +if __name__ == "__main__": + recorder = EMIndexRecorder() + recorder.run() + + +# the __all__ is generated +__all__ = ["EMIndexRecorder"] diff --git a/src/zvt/recorders/em/meta/em_indexus_meta_recorder.py b/src/zvt/recorders/em/meta/em_indexus_meta_recorder.py new file mode 100644 index 00000000..4cbfb29e --- /dev/null +++ b/src/zvt/recorders/em/meta/em_indexus_meta_recorder.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder +from zvt.domain.meta.indexus_meta import Indexus +from zvt.recorders.em import em_api + + +class EMIndexusRecorder(Recorder): + provider = "em" + data_schema = Indexus + + def run(self): + df = em_api.get_tradable_list(entity_type="indexus") + self.logger.info(df) + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +if __name__ == "__main__": + recorder = EMIndexusRecorder() + recorder.run() + + +# the __all__ is generated +__all__ = ["EMIndexusRecorder"] diff --git a/src/zvt/recorders/em/meta/em_stock_meta_recorder.py b/src/zvt/recorders/em/meta/em_stock_meta_recorder.py new file mode 100644 index 00000000..41cfa2a6 --- /dev/null +++ b/src/zvt/recorders/em/meta/em_stock_meta_recorder.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- + +from sqlalchemy.sql.expression import text + +from zvt.contract import Exchange +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder +from zvt.domain import Stock +from zvt.recorders.em import em_api +from zvt.utils.pd_utils 
import pd_is_not_null + + +class EMStockRecorder(Recorder): + provider = "em" + data_schema = Stock + + def run(self): + for exchange in [Exchange.sh, Exchange.sz, Exchange.bj]: + df = em_api.get_tradable_list(entity_type="stock", exchange=exchange) + # df_delist = df[df["name"].str.contains("退")] + if pd_is_not_null(df): + for item in df[["id", "name"]].values.tolist(): + id = item[0] + name = item[1] + sql = text(f'update stock set name = "{name}" where id = "{id}"') + self.session.execute(sql) + self.session.commit() + self.logger.info(df) + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +if __name__ == "__main__": + recorder = EMStockRecorder() + recorder.run() + + +# the __all__ is generated +__all__ = ["EMStockRecorder"] diff --git a/src/zvt/recorders/em/meta/em_stockhk_meta_recorder.py b/src/zvt/recorders/em/meta/em_stockhk_meta_recorder.py new file mode 100644 index 00000000..20840cbc --- /dev/null +++ b/src/zvt/recorders/em/meta/em_stockhk_meta_recorder.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder +from zvt.domain.meta.stockhk_meta import Stockhk +from zvt.recorders.em import em_api + + +class EMStockhkRecorder(Recorder): + provider = "em" + data_schema = Stockhk + + def run(self): + df_south = em_api.get_tradable_list(entity_type="stockhk", hk_south=True) + df_south = df_south.set_index("code", drop=False) + df_south["south"] = True + + df = em_api.get_tradable_list(entity_type="stockhk") + df = df.set_index("code", drop=False) + df_other = df.loc[~df.index.isin(df_south.index)].copy() + df_other["south"] = False + df_to_db(df=df_south, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + df_to_db(df=df_other, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +if __name__ == "__main__": + recorder = EMStockhkRecorder() + recorder.run() + + 
+# the __all__ is generated +__all__ = ["EMStockhkRecorder"] diff --git a/src/zvt/recorders/em/meta/em_stockus_meta_recorder.py b/src/zvt/recorders/em/meta/em_stockus_meta_recorder.py new file mode 100644 index 00000000..81736155 --- /dev/null +++ b/src/zvt/recorders/em/meta/em_stockus_meta_recorder.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder +from zvt.domain.meta.stockus_meta import Stockus +from zvt.recorders.em import em_api + + +class EMStockusRecorder(Recorder): + provider = "em" + data_schema = Stockus + + def run(self): + df = em_api.get_tradable_list(entity_type="stockus") + self.logger.info(df) + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +if __name__ == "__main__": + recorder = EMStockusRecorder() + recorder.run() + + +# the __all__ is generated +__all__ = ["EMStockusRecorder"] diff --git a/src/zvt/recorders/em/misc/__init__.py b/src/zvt/recorders/em/misc/__init__.py new file mode 100644 index 00000000..c8ba9435 --- /dev/null +++ b/src/zvt/recorders/em/misc/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule em_stock_news_recorder +from .em_stock_news_recorder import * +from .em_stock_news_recorder import __all__ as _em_stock_news_recorder_all + +__all__ += _em_stock_news_recorder_all diff --git a/src/zvt/recorders/em/misc/em_stock_news_recorder.py b/src/zvt/recorders/em/misc/em_stock_news_recorder.py new file mode 100644 index 00000000..e463090f --- /dev/null +++ b/src/zvt/recorders/em/misc/em_stock_news_recorder.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +import pandas as pd + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import FixedCycleDataRecorder +from zvt.domain import 
Stock +from zvt.domain.misc.stock_news import StockNews +from zvt.recorders.em import em_api +from zvt.utils.time_utils import count_interval, now_pd_timestamp, recent_year_date + + +class EMStockNewsRecorder(FixedCycleDataRecorder): + original_page_url = "https://wap.eastmoney.com/quote/stock/0.002572.html" + url = "https://np-listapi.eastmoney.com/comm/wap/getListInfo?cb=callback&client=wap&type=1&mTypeAndCode=0.002572&pageSize=200&pageIndex={}&callback=jQuery1830017478247906740352_1644568731256&_=1644568879493" + + entity_schema = Stock + data_schema = StockNews + entity_provider = "em" + provider = "em" + + def record(self, entity, start, end, size, timestamps): + from_date = recent_year_date() + if not start or (start < from_date): + start = from_date + + if count_interval(start, now_pd_timestamp()) <= 30: + ps = 30 + else: + ps = 200 + + latest_news: StockNews = self.get_latest_saved_record(entity=entity) + + news = em_api.get_news( + session=self.http_session, + entity_id=entity.id, + ps=ps, + start_timestamp=start, + latest_code=latest_news.news_code if latest_news else None, + ) + if news: + df = pd.DataFrame.from_records(news) + self.logger.info(df) + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +if __name__ == "__main__": + # df = Stock.query_data(filters=[Stock.exchange == "bj"], provider="em") + # entity_ids = df["entity_id"].tolist() + r = EMStockNewsRecorder(entity_ids=["stock_sh_600345"], sleeping_time=0) + r.run() + + +# the __all__ is generated +__all__ = ["EMStockNewsRecorder"] diff --git a/src/zvt/recorders/em/quotes/__init__.py b/src/zvt/recorders/em/quotes/__init__.py new file mode 100644 index 00000000..2a0affe9 --- /dev/null +++ b/src/zvt/recorders/em/quotes/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub 
modules + +# import all from submodule em_kdata_recorder +from .em_kdata_recorder import * +from .em_kdata_recorder import __all__ as _em_kdata_recorder_all + +__all__ += _em_kdata_recorder_all diff --git a/src/zvt/recorders/em/quotes/em_kdata_recorder.py b/src/zvt/recorders/em/quotes/em_kdata_recorder.py new file mode 100644 index 00000000..c8095904 --- /dev/null +++ b/src/zvt/recorders/em/quotes/em_kdata_recorder.py @@ -0,0 +1,211 @@ +# -*- coding: utf-8 -*- + +from zvt.api.kdata import get_kdata_schema +from zvt.contract import IntervalLevel, AdjustType +from zvt.contract.api import df_to_db +from zvt.contract.recorder import FixedCycleDataRecorder +from zvt.domain import ( + Stock, + Index, + Block, + StockKdataCommon, + IndexKdataCommon, + StockhkKdataCommon, + StockusKdataCommon, + BlockKdataCommon, + Indexus, + IndexusKdataCommon, + Future, + FutureKdataCommon, + Currency, + CurrencyKdataCommon, +) +from zvt.domain.meta.stockhk_meta import Stockhk +from zvt.domain.meta.stockus_meta import Stockus +from zvt.recorders.em import em_api +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import count_interval, now_pd_timestamp, current_date + + +class BaseEMStockKdataRecorder(FixedCycleDataRecorder): + default_size = 50000 + entity_provider: str = "em" + + provider = "em" + + def __init__( + self, + force_update=True, + sleeping_time=10, + exchanges=None, + entity_id=None, + entity_ids=None, + code=None, + codes=None, + day_data=False, + entity_filters=None, + ignore_failed=True, + real_time=False, + fix_duplicate_way="ignore", + start_timestamp=None, + end_timestamp=None, + level=IntervalLevel.LEVEL_1DAY, + kdata_use_begin_time=False, + one_day_trading_minutes=24 * 60, + adjust_type=AdjustType.qfq, + return_unfinished=False, + ) -> None: + level = IntervalLevel(level) + self.adjust_type = AdjustType(adjust_type) + self.entity_type = self.entity_schema.__name__.lower() + + self.data_schema = get_kdata_schema(entity_type=self.entity_type, 
level=level, adjust_type=self.adjust_type) + + super().__init__( + force_update, + sleeping_time, + exchanges, + entity_id, + entity_ids, + code, + codes, + day_data, + entity_filters, + ignore_failed, + real_time, + fix_duplicate_way, + start_timestamp, + end_timestamp, + level, + kdata_use_begin_time, + one_day_trading_minutes, + return_unfinished, + ) + + def record(self, entity, start, end, size, timestamps): + df = em_api.get_kdata( + session=self.http_session, entity_id=entity.id, limit=size, adjust_type=self.adjust_type, level=self.level + ) + if pd_is_not_null(df): + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + else: + self.logger.info(f"no kdata for {entity.id}") + + def on_finish_entity(self, entity): + # fill timestamp + if not entity.timestamp or not entity.list_date: + # get the first + kdatas = self.data_schema.query_data( + provider=self.provider, + entity_id=entity.id, + order=self.data_schema.timestamp.asc(), + limit=1, + return_type="domain", + ) + if kdatas: + timestamp = kdatas[0].timestamp + + self.logger.info(f"fill {entity.name} list_date as {timestamp}") + + if not entity.timestamp: + entity.timestamp = timestamp + if not entity.list_date: + entity.list_date = timestamp + self.entity_session.add(entity) + self.entity_session.commit() + + +class EMStockKdataRecorder(BaseEMStockKdataRecorder): + entity_schema = Stock + data_schema = StockKdataCommon + + def on_finish_entity(self, entity): + super().on_finish_entity(entity) + # fill holder + if not entity.holder_modified_date or (count_interval(entity.holder_modified_date, now_pd_timestamp()) > 30): + holder = em_api.get_controlling_shareholder(code=entity.code) + if holder: + entity.controlling_holder = holder.get("holder") + if holder.get("parent"): + entity.controlling_holder_parent = holder.get("parent") + else: + entity.controlling_holder_parent = holder.get("holder") + entity.holder_modified_date = current_date() + 
self.entity_session.add(entity) + self.entity_session.commit() + holder_stats = em_api.get_top_ten_free_holder_stats(code=entity.code) + if holder_stats: + entity.top_ten_ratio = holder_stats.get("ratio") + entity.holder_modified_date = current_date() + self.entity_session.add(entity) + self.entity_session.commit() + + +class EMStockusKdataRecorder(BaseEMStockKdataRecorder): + entity_provider = "em" + entity_schema = Stockus + data_schema = StockusKdataCommon + + +class EMStockhkKdataRecorder(BaseEMStockKdataRecorder): + entity_provider = "em" + entity_schema = Stockhk + data_schema = StockhkKdataCommon + + +class EMIndexKdataRecorder(BaseEMStockKdataRecorder): + entity_provider = "em" + entity_schema = Index + + data_schema = IndexKdataCommon + + +class EMIndexusKdataRecorder(BaseEMStockKdataRecorder): + entity_provider = "em" + entity_schema = Indexus + + data_schema = IndexusKdataCommon + + +class EMBlockKdataRecorder(BaseEMStockKdataRecorder): + entity_provider = "em" + entity_schema = Block + + data_schema = BlockKdataCommon + + +class EMFutureKdataRecorder(BaseEMStockKdataRecorder): + entity_provider = "em" + entity_schema = Future + + data_schema = FutureKdataCommon + + +class EMCurrencyKdataRecorder(BaseEMStockKdataRecorder): + entity_provider = "em" + entity_schema = Currency + + data_schema = CurrencyKdataCommon + + +if __name__ == "__main__": + df = Stock.query_data(filters=[Stock.exchange == "bj"], provider="em") + entity_ids = df["entity_id"].tolist() + recorder = EMStockKdataRecorder( + level=IntervalLevel.LEVEL_1DAY, entity_ids=entity_ids, sleeping_time=0, adjust_type=AdjustType.hfq + ) + recorder.run() + + +# the __all__ is generated +__all__ = [ + "BaseEMStockKdataRecorder", + "EMStockKdataRecorder", + "EMStockusKdataRecorder", + "EMStockhkKdataRecorder", + "EMIndexKdataRecorder", + "EMIndexusKdataRecorder", + "EMBlockKdataRecorder", + "EMFutureKdataRecorder", + "EMCurrencyKdataRecorder", +] diff --git a/src/zvt/recorders/em/trading/__init__.py 
b/src/zvt/recorders/em/trading/__init__.py new file mode 100644 index 00000000..c01971b8 --- /dev/null +++ b/src/zvt/recorders/em/trading/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule em_dragon_and_tiger_recorder +from .em_dragon_and_tiger_recorder import * +from .em_dragon_and_tiger_recorder import __all__ as _em_dragon_and_tiger_recorder_all + +__all__ += _em_dragon_and_tiger_recorder_all diff --git a/src/zvt/recorders/em/trading/em_dragon_and_tiger_recorder.py b/src/zvt/recorders/em/trading/em_dragon_and_tiger_recorder.py new file mode 100644 index 00000000..4a31b910 --- /dev/null +++ b/src/zvt/recorders/em/trading/em_dragon_and_tiger_recorder.py @@ -0,0 +1,200 @@ +# -*- coding: utf-8 -*- + +import pandas as pd + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import FixedCycleDataRecorder +from zvt.domain import Stock, DragonAndTiger +from zvt.recorders.em import em_api +from zvt.utils.time_utils import to_pd_timestamp, to_time_str, TIME_FORMAT_DAY, date_time_by_interval + +{ + "TRADE_ID": "3066028", + "TRADE_DATE": "2018-10-31 00:00:00", + # 原因 + "EXPLANATION": "日涨幅偏离值达到7%的前五只证券", + "SECUCODE": "000989.SZ", + "SECURITY_CODE": "000989", + "SECURITY_NAME_ABBR": "九芝堂", + # 成交额 + "ACCUM_AMOUNT": 361620405, + # 涨跌幅 + "CHANGE_RATE": 10.0324, + # 净买入 + "NET_BUY": 101274668.45, + "BUY_BUY_TOTAL": 150153489.67, + "BUY_SELL_TOTAL": 6319593.12, + "BUY_RATIO_TOTAL": 41.810172373984, + "SELL_BUY_TOTAL": 31575718.69, + "SELL_SELL_TOTAL": 49862244.22, + "SELL_RATIO_TOTAL": 13.80437760972, + # 买入金额 + "BUY_TOTAL": 151194114.67, + # 卖出金额 + "SELL_TOTAL": 49919446.22, + "BUY_TOTAL_NET": 143833896.55, + "SELL_TOTAL_NET": -18286525.53, + "LIST": [ + { + "TRADE_DIRECTION": "0", + "RANK": 1, + "OPERATEDEPT_NAME": "西藏东方财富证券股份有限公司武汉建设大道证券营业部", + 
"BUY_AMT_REAL": 92701932.28, + "SELL_AMT_REAL": 0, + "BUY_RATIO": 25.635149731111, + "SELL_RATIO": 0, + "NET": 92701932.28, + }, + { + "TRADE_DIRECTION": "1", + "RANK": 1, + "OPERATEDEPT_NAME": "中泰证券股份有限公司惠州文明一路证券营业部", + "BUY_AMT_REAL": 0, + "SELL_AMT_REAL": 20806577, + "BUY_RATIO": 0, + "SELL_RATIO": 5.753706569739, + "NET": -20806577, + }, + { + "TRADE_DIRECTION": "1", + "RANK": 2, + "OPERATEDEPT_NAME": "中泰证券股份有限公司深圳泰然九路证券营业部", + "BUY_AMT_REAL": 0, + "SELL_AMT_REAL": 9999269.85, + "BUY_RATIO": 0, + "SELL_RATIO": 2.765128768107, + "NET": -9999269.85, + }, + { + "TRADE_DIRECTION": "0", + "RANK": 2, + "OPERATEDEPT_NAME": "深股通专用", + "BUY_AMT_REAL": 30535093.69, + "SELL_AMT_REAL": 6262391.12, + "BUY_RATIO": 8.443963135874, + "SELL_RATIO": 1.731758228632, + "NET": 24272702.57, + }, + { + "TRADE_DIRECTION": "0", + "RANK": 3, + "OPERATEDEPT_NAME": "联储证券有限责任公司郑州文化路证券营业部", + "BUY_AMT_REAL": 10185863, + "SELL_AMT_REAL": 45600, + "BUY_RATIO": 2.816727944321, + "SELL_RATIO": 0.012609907895, + "NET": 10140263, + }, + { + "TRADE_DIRECTION": "1", + "RANK": 3, + "OPERATEDEPT_NAME": "中信证券股份有限公司杭州文三路证券营业部", + "BUY_AMT_REAL": 1040625, + "SELL_AMT_REAL": 7246342.25, + "BUY_RATIO": 0.287767223755, + "SELL_RATIO": 2.003853253248, + "NET": -6205717.25, + }, + { + "TRADE_DIRECTION": "0", + "RANK": 4, + "OPERATEDEPT_NAME": "华泰证券股份有限公司北京广渠门内大街证券营业部", + "BUY_AMT_REAL": 9089939.7, + "SELL_AMT_REAL": 0, + "BUY_RATIO": 2.513668912018, + "SELL_RATIO": 0, + "NET": 9089939.7, + }, + { + "TRADE_DIRECTION": "1", + "RANK": 4, + "OPERATEDEPT_NAME": "深股通专用", + "BUY_AMT_REAL": 30535093.69, + "SELL_AMT_REAL": 6262391.12, + "BUY_RATIO": 8.443963135874, + "SELL_RATIO": 1.731758228632, + "NET": 24272702.57, + }, + { + "TRADE_DIRECTION": "1", + "RANK": 5, + "OPERATEDEPT_NAME": "英大证券有限责任公司深圳园岭三街证券营业部", + "BUY_AMT_REAL": 0, + "SELL_AMT_REAL": 5547664, + "BUY_RATIO": 0, + "SELL_RATIO": 1.534112545447, + "NET": -5547664, + }, + { + "TRADE_DIRECTION": "0", + "RANK": 5, + "OPERATEDEPT_NAME": 
"申万宏源证券有限公司南宁长湖路证券营业部", + "BUY_AMT_REAL": 7640661, + "SELL_AMT_REAL": 11602, + "BUY_RATIO": 2.112895426905, + "SELL_RATIO": 0.003208336653, + "NET": 7629059, + }, + ], +} + + +class EMDragonAndTigerRecorder(FixedCycleDataRecorder): + entity_provider = "em" + entity_schema = Stock + + provider = "em" + data_schema = DragonAndTiger + + def record(self, entity, start, end, size, timestamps): + if start: + start_date = to_time_str(date_time_by_interval(start)) + else: + start_date = None + datas = em_api.get_dragon_and_tiger(code=entity.code, start_date=start_date) + if datas: + records = [] + for data in datas: + timestamp = to_pd_timestamp(data["TRADE_DATE"]) + record = { + "id": "{}_{}_{}".format(entity.id, data["TRADE_ID"], to_time_str(timestamp, fmt=TIME_FORMAT_DAY)), + "entity_id": entity.id, + "timestamp": timestamp, + "code": entity.code, + "name": entity.name, + "reason": data["EXPLANATION"], + "turnover": data["ACCUM_AMOUNT"], + "change_pct": data["CHANGE_RATE"], + "net_in": data["NET_BUY"], + } + + # 营业部列表 + deps = data["LIST"] + for dep in deps: + flag = "" if dep["TRADE_DIRECTION"] == "0" else "_" + rank = dep["RANK"] + dep_name = f"dep{flag}{rank}" + dep_in = f"{dep_name}_in" + dep_out = f"{dep_name}_out" + dep_rate = f"{dep_name}_rate" + + record[dep_name] = dep["OPERATEDEPT_NAME"] + record[dep_in] = dep["BUY_AMT_REAL"] + record[dep_out] = dep["SELL_AMT_REAL"] + record[dep_rate] = (dep["BUY_RATIO"] if dep["BUY_RATIO"] else 0) - ( + dep["SELL_RATIO"] if dep["SELL_RATIO"] else 0 + ) + + records.append(record) + df = pd.DataFrame.from_records(records) + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + else: + self.logger.info(f"no data for {entity.id}") + + +if __name__ == "__main__": + EMDragonAndTigerRecorder(sleeping_time=0.1, exchanges=["sh"]).run() + + +# the __all__ is generated +__all__ = ["EMDragonAndTigerRecorder"] diff --git a/src/zvt/recorders/exchange/__init__.py 
b/src/zvt/recorders/exchange/__init__.py new file mode 100644 index 00000000..5ca5d1ef --- /dev/null +++ b/src/zvt/recorders/exchange/__init__.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*-# + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule exchange_stock_summary_recorder +from .exchange_stock_summary_recorder import * +from .exchange_stock_summary_recorder import __all__ as _exchange_stock_summary_recorder_all + +__all__ += _exchange_stock_summary_recorder_all + +# import all from submodule api +from .api import * +from .api import __all__ as _api_all + +__all__ += _api_all + +# import all from submodule exchange_stock_meta_recorder +from .exchange_stock_meta_recorder import * +from .exchange_stock_meta_recorder import __all__ as _exchange_stock_meta_recorder_all + +__all__ += _exchange_stock_meta_recorder_all + +# import all from submodule exchange_index_recorder +from .exchange_index_recorder import * +from .exchange_index_recorder import __all__ as _exchange_index_recorder_all + +__all__ += _exchange_index_recorder_all + +# import all from submodule exchange_etf_meta_recorder +from .exchange_etf_meta_recorder import * +from .exchange_etf_meta_recorder import __all__ as _exchange_etf_meta_recorder_all + +__all__ += _exchange_etf_meta_recorder_all + +# import all from submodule exchange_index_stock_recorder +from .exchange_index_stock_recorder import * +from .exchange_index_stock_recorder import __all__ as _exchange_index_stock_recorder_all + +__all__ += _exchange_index_stock_recorder_all diff --git a/src/zvt/recorders/exchange/api/__init__.py b/src/zvt/recorders/exchange/api/__init__.py new file mode 100644 index 00000000..453ccf18 --- /dev/null +++ b/src/zvt/recorders/exchange/api/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: 
+# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule cs_index_stock_api +from .cs_index_stock_api import * +from .cs_index_stock_api import __all__ as _cs_index_stock_api_all + +__all__ += _cs_index_stock_api_all + +# import all from submodule cs_index_api +from .cs_index_api import * +from .cs_index_api import __all__ as _cs_index_api_all + +__all__ += _cs_index_api_all + +# import all from submodule cn_index_api +from .cn_index_api import * +from .cn_index_api import __all__ as _cn_index_api_all + +__all__ += _cn_index_api_all + +# import all from submodule cn_index_stock_api +from .cn_index_stock_api import * +from .cn_index_stock_api import __all__ as _cn_index_stock_api_all + +__all__ += _cn_index_stock_api_all diff --git a/src/zvt/recorders/exchange/api/cn_index_api.py b/src/zvt/recorders/exchange/api/cn_index_api.py new file mode 100644 index 00000000..0d1262ca --- /dev/null +++ b/src/zvt/recorders/exchange/api/cn_index_api.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- +import logging +import time + +import pandas as pd +import requests + +from zvt.domain import IndexCategory +from zvt.recorders.consts import DEFAULT_HEADER +from zvt.utils.time_utils import to_pd_timestamp + +logger = logging.getLogger(__name__) + +original_page_url = "http://www.cnindex.com.cn/zh_indices/sese/index.html?act_menu=1&index_type=-1" +url = "http://www.cnindex.com.cn/index/indexList?channelCode={}&rows=1000&pageNum=1" + +# 中证指数 抓取 风格指数 行业指数 规模指数 基金指数 +cni_category_map_url = { + IndexCategory.style: url.format("202"), + IndexCategory.industry: url.format("201"), + IndexCategory.scope: url.format("200"), + IndexCategory.fund: url.format("207"), +} + +# 深证指数 只取规模指数 +sz_category_map_url = { + IndexCategory.scope: url.format("100"), +} + + +def _get_resp_data(resp: requests.Response): + resp.raise_for_status() + return resp.json()["data"] + + +def get_cn_index(index_type="cni", 
category=IndexCategory.style): + if index_type == "cni": + category_map_url = cni_category_map_url + elif index_type == "sz": + category_map_url = sz_category_map_url + else: + logger.error(f"not support index_type: {index_type}") + assert False + + requests_session = requests.Session() + + url = category_map_url.get(category) + + resp = requests_session.get(url, headers=DEFAULT_HEADER) + + results = _get_resp_data(resp)["rows"] + # e.g + # amount: 277743699997.9 + # closeingPoint: 6104.7592 + # docchannel: 1039 + # freeMarketValue: 10794695531696.15 + # id: 142 + # indexcode: "399370" + # indexename: "CNI Growth" + # indexfullcname: "国证1000成长指数" + # indexfullename: "CNI 1000 Growth Index" + # indexname: "国证成长" + # indexsource: "1" + # indextype: "202" + # pb: 5.34 + # peDynamic: 29.8607 + # peStatic: 33.4933 + # percent: 0.0022 + # prefixmonth: null + # realtimemarket: "1" + # remark: "" + # sampleshowdate: null + # samplesize: 332 + # showcnindex: "1" + # totalMarketValue: 23113641352198.32 + the_list = [] + + logger.info(f"category: {category} ") + logger.info(f"results: {results} ") + for i, result in enumerate(results): + logger.info(f"to {i}/{len(results)}") + code = result["indexcode"] + info_resp = requests_session.get(f"http://www.cnindex.com.cn/index-intro?indexcode={code}") + # fbrq: "2010-01-04" + # jd: 1000 + # jr: "2002-12-31" + # jsfs: "自由流通市值" + # jsjj: "国证成长由国证1000指数样本股中成长风格突出的股票组成,为投资者提供更丰富的指数化投资工具。" + # qzsx: null + # typl: 2 + # xyfw: "沪深A股" + # xygz: "在国证1000指数样本股中,选取主营业务收入增长率、净利润增长率和净资产收益率综合排名前332只" + index_info = _get_resp_data(info_resp) + name = result["indexname"] + entity_id = f"index_sz_{code}" + index_item = { + "id": entity_id, + "entity_id": entity_id, + "timestamp": to_pd_timestamp(index_info["jr"]), + "entity_type": "index", + "exchange": "sz", + "code": code, + "name": name, + "category": category.value, + "list_date": to_pd_timestamp(index_info["fbrq"]), + "base_point": index_info["jd"], + "publisher": "cnindex", + } + 
logger.info(index_item) + the_list.append(index_item) + time.sleep(3) + if the_list: + return pd.DataFrame.from_records(the_list) + + +if __name__ == "__main__": + df = get_cn_index() + print(df) + + +# the __all__ is generated +__all__ = ["get_cn_index"] diff --git a/src/zvt/recorders/exchange/api/cn_index_stock_api.py b/src/zvt/recorders/exchange/api/cn_index_stock_api.py new file mode 100644 index 00000000..14278c36 --- /dev/null +++ b/src/zvt/recorders/exchange/api/cn_index_stock_api.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +import logging + +import pandas as pd +import requests + +from zvt.api.utils import china_stock_code_to_id, value_to_pct, value_multiply +from zvt.recorders.consts import DEFAULT_HEADER +from zvt.utils.time_utils import to_pd_timestamp, to_time_str, TIME_FORMAT_MON + +logger = logging.getLogger(__name__) + +original_page_url = "http://www.cnindex.com.cn/module/index-detail.html?act_menu=1&indexCode=399001" +url = "http://www.cnindex.com.cn/sample-detail/detail?indexcode={}&dateStr={}&pageNum=1&rows=5000" + + +def _get_resp_data(resp: requests.Response): + resp.raise_for_status() + return resp.json()["data"] + + +def get_cn_index_stock(code, timestamp, name=None): + entity_type = "index" + exchange = "sz" + entity_id = f"{entity_type}_{exchange}_{code}" + data_str = to_time_str(timestamp, TIME_FORMAT_MON) + resp = requests.get(url.format(code, data_str), headers=DEFAULT_HEADER) + data = _get_resp_data(resp) + if not data: + return + results = _get_resp_data(resp)["rows"] + + the_list = [] + for result in results: + # date: 1614268800000 + # dateStr: "2021-02-26" + # freeMarketValue: 10610.8 + # indexcode: "399370" + # market: null + # seccode: "600519" + # secname: "贵州茅台" + # totalMarketValue: 26666.32 + # trade: "主要消费" + # weight: 10.01 + stock_code = result["seccode"] + stock_name = result["secname"] + stock_id = china_stock_code_to_id(stock_code) + + the_list.append( + { + "id": "{}_{}_{}".format(entity_id, result["dateStr"], 
stock_id), + "entity_id": entity_id, + "entity_type": entity_type, + "exchange": exchange, + "code": code, + "name": name, + "timestamp": to_pd_timestamp(result["dateStr"]), + "stock_id": stock_id, + "stock_code": stock_code, + "stock_name": stock_name, + "proportion": value_to_pct(result["weight"], 0), + "market_cap": value_multiply(result["freeMarketValue"], 100000000, 0), + } + ) + if the_list: + df = pd.DataFrame.from_records(the_list) + return df + + +if __name__ == "__main__": + df = get_cn_index_stock(timestamp="2021-08-01", code="399370", name="国证成长") + print(df) + + +# the __all__ is generated +__all__ = ["get_cn_index_stock"] diff --git a/src/zvt/recorders/exchange/api/cs_index_api.py b/src/zvt/recorders/exchange/api/cs_index_api.py new file mode 100644 index 00000000..20fcf25a --- /dev/null +++ b/src/zvt/recorders/exchange/api/cs_index_api.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +import logging + +import pandas as pd +import requests + +from zvt.domain import IndexCategory +from zvt.recorders.consts import DEFAULT_HEADER +from zvt.utils.time_utils import to_pd_timestamp + +logger = logging.getLogger(__name__) + +original_page_url = "https://www.csindex.com.cn/zh-CN#/indices/family/list?index_series=2" + +url = "https://www.csindex.com.cn/csindex-home/index-list/query-index-item" + +index_category_map = {IndexCategory.scope: "17", IndexCategory.industry: "18", IndexCategory.style: "19"} + + +def _get_resp_data(resp: requests.Response): + resp.raise_for_status() + return resp.json()["data"] + + +def _get_params(index_type, category: IndexCategory): + if index_type == "csi": + index_series = ["1"] + elif index_type == "sh": + index_series = ["2"] + else: + logger.warning(f"not support index type: {index_type}") + assert False + index_classify = index_category_map.get(category) + + return { + "sorter": {"sortField": "index_classify", "sortOrder": "asc"}, + "pager": {"pageNum": 1, "pageSize": 10}, + "indexFilter": { + "ifCustomized": None, + 
"ifTracked": None, + "ifWeightCapped": None, + "indexCompliance": None, + "hotSpot": None, + "indexClassify": [index_classify], + "currency": None, + "region": None, + "indexSeries": index_series, + "undefined": None, + }, + } + + +def get_cs_index(index_type="sh"): + if index_type == "csi": + category_list = [IndexCategory.scope, IndexCategory.industry, IndexCategory.style] + elif index_type == "sh": + category_list = [IndexCategory.scope] + else: + logger.warning(f"not support index type: {index_type}") + assert False + + requests_session = requests.Session() + + for category in category_list: + data = _get_params(index_type=index_type, category=category) + print(data) + resp = requests_session.post(url, headers=DEFAULT_HEADER, json=data) + + print(resp) + results = _get_resp_data(resp) + the_list = [] + + logger.info(f"category: {category} ") + logger.info(f"results: {results} ") + for i, result in enumerate(results): + logger.info(f"to {i}/{len(results)}") + code = result["indexCode"] + + info_url = f"https://www.csindex.com.cn/csindex-home/indexInfo/index-basic-info/{code}" + info = _get_resp_data(requests_session.get(info_url)) + + name = result["indexName"] + entity_id = f"index_sh_{code}" + index_item = { + "id": entity_id, + "entity_id": entity_id, + "timestamp": to_pd_timestamp(info["basicDate"]), + "entity_type": "index", + "exchange": "sh", + "code": code, + "name": name, + "category": category.value, + "list_date": to_pd_timestamp(result["publishDate"]), + "base_point": info["basicIndex"], + "publisher": "csindex", + } + logger.info(index_item) + the_list.append(index_item) + if the_list: + return pd.DataFrame.from_records(the_list) + + +if __name__ == "__main__": + df = get_cs_index() + print(df) + + +# the __all__ is generated +__all__ = ["get_cs_index"] diff --git a/src/zvt/recorders/exchange/api/cs_index_stock_api.py b/src/zvt/recorders/exchange/api/cs_index_stock_api.py new file mode 100644 index 00000000..ce16890b --- /dev/null +++ 
b/src/zvt/recorders/exchange/api/cs_index_stock_api.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +import io +import logging + +import pandas as pd +import requests + +from zvt.api.utils import china_stock_code_to_id +from zvt.recorders.consts import DEFAULT_HEADER +from zvt.utils.time_utils import now_pd_timestamp + +logger = logging.getLogger(__name__) + +original_page_url = "http://www.csindex.com.cn/zh-CN/downloads/indices" +url = "http://www.csindex.com.cn/uploads/file/autofile/cons/{}cons.xls" + + +def get_cs_index_stock(code, timestamp, name=None): + entity_type = "index" + exchange = "sh" + entity_id = f"{entity_type}_{exchange}_{code}" + + response = requests.get(url.format(code), headers=DEFAULT_HEADER) + response.raise_for_status() + + df = pd.read_excel(io.BytesIO(response.content)) + + df = df[["日期Date", "成分券代码Constituent Code", "成分券名称Constituent Name"]].rename( + columns={"日期Date": "timestamp", "成分券代码Constituent Code": "stock_code", "成分券名称Constituent Name": "stock_name"} + ) + + df["entity_id"] = entity_id + df["entity_type"] = "index" + df["exchange"] = "sh" + df["code"] = code + df["name"] = name + df["stock_id"] = df["stock_code"].apply(lambda x: china_stock_code_to_id(str(x))) + # id format: {entity_id}_{timestamp}_{stock_id} + df["id"] = df[["entity_id", "timestamp", "stock_id"]].apply(lambda x: "_".join(x.astype(str)), axis=1) + df["timestamp"] = pd.to_datetime(df["timestamp"]) + + return df + + +if __name__ == "__main__": + df = get_cs_index_stock(code="000001", name="上证指数", timestamp=now_pd_timestamp()) + print(df) + + +# the __all__ is generated +__all__ = ["get_cs_index_stock"] diff --git a/src/zvt/recorders/exchange/exchange_etf_meta_recorder.py b/src/zvt/recorders/exchange/exchange_etf_meta_recorder.py new file mode 100644 index 00000000..7581155e --- /dev/null +++ b/src/zvt/recorders/exchange/exchange_etf_meta_recorder.py @@ -0,0 +1,214 @@ +# -*- coding: utf-8 -*- + +import io +import re + +import demjson3 +import pandas as pd +import 
requests + +from zvt.api.utils import china_stock_code_to_id +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder +from zvt.domain import EtfStock, Etf +from zvt.recorders.consts import DEFAULT_SH_ETF_LIST_HEADER +from zvt.utils.time_utils import now_pd_timestamp + + +class ChinaETFListSpider(Recorder): + data_schema = EtfStock + + def __init__(self, force_update=False, sleeping_time=10.0, provider="exchange") -> None: + self.provider = provider + super().__init__(force_update, sleeping_time) + + def run(self): + # 抓取沪市 ETF 列表 + url = "http://query.sse.com.cn/commonQuery.do?sqlId=COMMON_SSE_ZQPZ_ETFLB_L_NEW" + response = requests.get(url, headers=DEFAULT_SH_ETF_LIST_HEADER) + response_dict = demjson3.decode(response.text) + + df = pd.DataFrame(response_dict.get("result", [])) + self.persist_etf_list(df, exchange="sh") + self.logger.info("沪市 ETF 列表抓取完成...") + + # 抓取沪市 ETF 成分股 + self.download_sh_etf_component(df) + self.logger.info("沪市 ETF 成分股抓取完成...") + + # 抓取深市 ETF 列表 + url = "http://www.szse.cn/api/report/ShowReport?SHOWTYPE=xlsx&CATALOGID=1945" + response = requests.get(url) + + df = pd.read_excel(io.BytesIO(response.content), dtype=str) + self.persist_etf_list(df, exchange="sz") + self.logger.info("深市 ETF 列表抓取完成...") + + # 抓取深市 ETF 成分股 + self.download_sz_etf_component(df) + self.logger.info("深市 ETF 成分股抓取完成...") + + def persist_etf_list(self, df: pd.DataFrame, exchange: str): + if df is None: + return + + df = df.copy() + if exchange == "sh": + df = df[["FUND_ID", "FUND_NAME"]] + elif exchange == "sz": + df = df[["证券代码", "证券简称"]] + + df.columns = ["code", "name"] + df["id"] = df["code"].apply(lambda code: f"etf_{exchange}_{code}") + df["entity_id"] = df["id"] + df["exchange"] = exchange + df["entity_type"] = "etf" + df["category"] = "etf" + + df = df.dropna(axis=0, how="any") + df = df.drop_duplicates(subset="id", keep="last") + + df_to_db(df=df, data_schema=Etf, provider=self.provider, force_update=False) + + def 
download_sh_etf_component(self, df: pd.DataFrame): + query_url = ( + "http://query.sse.com.cn/infodisplay/queryConstituentStockInfo.do?" "isPagination=false&type={}&etfClass={}" + ) + + etf_df = df[(df["ETF_CLASS"] == "1") | (df["ETF_CLASS"] == "2")] + etf_df = self.populate_sh_etf_type(etf_df) + + for _, etf in etf_df.iterrows(): + url = query_url.format(etf["ETF_TYPE"], etf["ETF_CLASS"]) + response = requests.get(url, headers=DEFAULT_SH_ETF_LIST_HEADER) + response_dict = demjson3.decode(response.text) + response_df = pd.DataFrame(response_dict.get("result", [])) + + etf_code = etf["FUND_ID"] + etf_id = f"etf_sh_{etf_code}" + response_df = response_df[["instrumentId", "instrumentName"]].copy() + response_df.rename(columns={"instrumentId": "stock_code", "instrumentName": "stock_name"}, inplace=True) + + response_df["entity_id"] = etf_id + response_df["entity_type"] = "etf" + response_df["exchange"] = "sh" + response_df["code"] = etf_code + response_df["name"] = etf["FUND_NAME"] + response_df["timestamp"] = now_pd_timestamp() + + response_df["stock_id"] = response_df["stock_code"].apply(lambda code: china_stock_code_to_id(code)) + response_df["id"] = response_df["stock_id"].apply(lambda x: f"{etf_id}_{x}") + + df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider) + self.logger.info(f'{etf["FUND_NAME"]} - {etf_code} 成分股抓取完成...') + + self.sleep() + + def download_sz_etf_component(self, df: pd.DataFrame): + query_url = "http://vip.stock.finance.sina.com.cn/corp/go.php/vII_NewestComponent/indexid/{}.phtml" + + self.parse_sz_etf_underlying_index(df) + for _, etf in df.iterrows(): + underlying_index = etf["拟合指数"] + etf_code = etf["证券代码"] + + if len(underlying_index) == 0: + self.logger.info(f'{etf["证券简称"]} - {etf_code} 非 A 股市场指数,跳过...') + continue + + url = query_url.format(underlying_index) + response = requests.get(url) + response.encoding = "gbk" + + try: + dfs = pd.read_html(response.text, header=1) + except ValueError as error: + 
self.logger.error(f"HTML parse error: {error}, response: {response.text}") + continue + + if len(dfs) < 4: + continue + + response_df = dfs[3].copy() + response_df = response_df.dropna(axis=1, how="any") + response_df["品种代码"] = response_df["品种代码"].apply(lambda x: f"{x:06d}") + + etf_id = f"etf_sz_{etf_code}" + response_df = response_df[["品种代码", "品种名称"]].copy() + response_df.rename(columns={"品种代码": "stock_code", "品种名称": "stock_name"}, inplace=True) + + response_df["entity_id"] = etf_id + response_df["entity_type"] = "etf" + response_df["exchange"] = "sz" + response_df["code"] = etf_code + response_df["name"] = etf["证券简称"] + response_df["timestamp"] = now_pd_timestamp() + + response_df["stock_id"] = response_df["stock_code"].apply(lambda code: china_stock_code_to_id(code)) + response_df["id"] = response_df["stock_id"].apply(lambda x: f"{etf_id}_{x}") + + df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider) + self.logger.info(f'{etf["证券简称"]} - {etf_code} 成分股抓取完成...') + + self.sleep() + + @staticmethod + def populate_sh_etf_type(df: pd.DataFrame): + """ + 填充沪市 ETF 代码对应的 TYPE 到列表数据中 + :param df: ETF 列表数据 + :return: 包含 ETF 对应 TYPE 的列表数据 + """ + query_url = ( + "http://query.sse.com.cn/infodisplay/queryETFNewAllInfo.do?" 
+ "isPagination=false&type={}&pageHelp.pageSize=25" + ) + + type_df = pd.DataFrame() + for etf_class in [1, 2]: + url = query_url.format(etf_class) + response = requests.get(url, headers=DEFAULT_SH_ETF_LIST_HEADER) + response_dict = demjson3.decode(response.text) + response_df = pd.DataFrame(response_dict.get("result", [])) + response_df = response_df[["fundid1", "etftype"]] + + type_df = pd.concat([type_df, response_df]) + + result_df = df.copy() + result_df = result_df.sort_values(by="FUND_ID").reset_index(drop=True) + type_df = type_df.sort_values(by="fundid1").reset_index(drop=True) + + result_df["ETF_TYPE"] = type_df["etftype"] + + return result_df + + @staticmethod + def parse_sz_etf_underlying_index(df: pd.DataFrame): + """ + 解析深市 ETF 对应跟踪的指数代码 + :param df: ETF 列表数据 + :return: 解析完成 ETF 对应指数代码的列表数据 + """ + + def parse_index(text): + if len(text) == 0: + return "" + + result = re.search(r"(\d+).*", text) + if result is None: + return "" + else: + return result.group(1) + + df["拟合指数"] = df["拟合指数"].apply(parse_index) + + +__all__ = ["ChinaETFListSpider"] + +if __name__ == "__main__": + spider = ChinaETFListSpider(provider="exchange") + spider.run() + + +# the __all__ is generated +__all__ = ["ChinaETFListSpider"] diff --git a/src/zvt/recorders/exchange/exchange_index_recorder.py b/src/zvt/recorders/exchange/exchange_index_recorder.py new file mode 100644 index 00000000..15c6c1ac --- /dev/null +++ b/src/zvt/recorders/exchange/exchange_index_recorder.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder +from zvt.domain import Index +from zvt.recorders.exchange.api import cn_index_api, cs_index_api + + +class ExchangeIndexRecorder(Recorder): + provider = "exchange" + data_schema = Index + + def run(self): + # 深圳 + self.record_cn_index("sz") + # 国证 + self.record_cn_index("cni") + + # 上海 + self.record_cs_index("sh") + # 中证 + self.record_cs_index("csi") + + # 中证,上海 + def record_cs_index(self, 
index_type): + df = cs_index_api.get_cs_index(index_type=index_type) + df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, force_update=True) + self.logger.info(f"finish record {index_type} index") + + # 国证,深圳 + def record_cn_index(self, index_type): + if index_type == "cni": + category_map_url = cn_index_api.cni_category_map_url + elif index_type == "sz": + category_map_url = cn_index_api.sz_category_map_url + else: + self.logger.error(f"not support index_type: {index_type}") + assert False + + for category, _ in category_map_url.items(): + df = cn_index_api.get_cn_index(index_type=index_type, category=category) + df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, force_update=True) + self.logger.info(f"finish record {index_type} index:{category.value}") + + +if __name__ == "__main__": + # init_log('china_stock_category.log') + ExchangeIndexRecorder().run() + + +# the __all__ is generated +__all__ = ["ExchangeIndexRecorder"] diff --git a/src/zvt/recorders/exchange/exchange_index_stock_recorder.py b/src/zvt/recorders/exchange/exchange_index_stock_recorder.py new file mode 100644 index 00000000..141bfb0c --- /dev/null +++ b/src/zvt/recorders/exchange/exchange_index_stock_recorder.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- + +from typing import List + +import pandas as pd + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import TimestampsDataRecorder +from zvt.domain import Index, IndexStock +from zvt.recorders.exchange.api import cs_index_stock_api, cn_index_stock_api +from zvt.utils.time_utils import pre_month_start_date +from zvt.utils.time_utils import to_pd_timestamp + + +class ExchangeIndexStockRecorder(TimestampsDataRecorder): + entity_provider = "exchange" + entity_schema = Index + + provider = "exchange" + data_schema = IndexStock + + def __init__( + self, + force_update=False, + sleeping_time=5, + exchanges=None, + entity_id=None, + entity_ids=None, + code=None, + codes=None, + day_data=False, + 
entity_filters=None, + ignore_failed=True, + real_time=False, + fix_duplicate_way="add", + start_timestamp=None, + end_timestamp=None, + record_history=False, + ) -> None: + super().__init__( + force_update, + sleeping_time, + exchanges, + entity_id, + entity_ids, + code, + codes, + day_data, + entity_filters, + ignore_failed, + real_time, + fix_duplicate_way, + start_timestamp, + end_timestamp, + ) + self.record_history = record_history + + def init_timestamps(self, entity_item) -> List[pd.Timestamp]: + last_valid_date = pre_month_start_date() + if self.record_history: + # 每个月记录一次 + return [to_pd_timestamp(item) for item in pd.date_range(entity_item.list_date, last_valid_date, freq="M")] + else: + return [last_valid_date] + + def record(self, entity, start, end, size, timestamps): + if entity.publisher == "cnindex": + for timestamp in timestamps: + df = cn_index_stock_api.get_cn_index_stock(code=entity.code, timestamp=timestamp, name=entity.name) + df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, force_update=True) + elif entity.publisher == "csindex": + # cs index not support history data + df = cs_index_stock_api.get_cs_index_stock(code=entity.code, timestamp=None, name=entity.name) + df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, force_update=True) + + +if __name__ == "__main__": + # ExchangeIndexMetaRecorder().run() + ExchangeIndexStockRecorder(codes=["399370"]).run() + + +# the __all__ is generated +__all__ = ["ExchangeIndexStockRecorder"] diff --git a/src/zvt/recorders/exchange/exchange_stock_meta_recorder.py b/src/zvt/recorders/exchange/exchange_stock_meta_recorder.py new file mode 100644 index 00000000..a708fc9e --- /dev/null +++ b/src/zvt/recorders/exchange/exchange_stock_meta_recorder.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- + +import io + +import pandas as pd +import requests + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder +from zvt.domain import Stock, StockDetail 
+from zvt.recorders.consts import DEFAULT_SH_HEADER, DEFAULT_SZ_HEADER +from zvt.utils.time_utils import to_pd_timestamp + + +class ExchangeStockMetaRecorder(Recorder): + data_schema = Stock + provider = "exchange" + + original_page_url = "http://www.sse.com.cn/assortment/stock/list/share/" + + def run(self): + url = ( + "http://query.sse.com.cn/security/stock/downloadStockListFile.do?csrcCode=&stockCode=&areaName=&stockType=1" + ) + resp = requests.get(url, headers=DEFAULT_SH_HEADER) + self.download_stock_list(response=resp, exchange="sh") + + url = ( + "http://query.sse.com.cn/security/stock/downloadStockListFile.do?csrcCode=&stockCode=&areaName=&stockType=8" + ) + resp = requests.get(url, headers=DEFAULT_SH_HEADER) + self.download_stock_list(response=resp, exchange="sh") + + url = "http://www.szse.cn/api/report/ShowReport?SHOWTYPE=xlsx&CATALOGID=1110&TABKEY=tab1&random=0.20932135244582617" + resp = requests.get(url, headers=DEFAULT_SZ_HEADER) + self.download_stock_list(response=resp, exchange="sz") + + def download_stock_list(self, response, exchange): + df = None + if exchange == "sh": + df = pd.read_csv( + io.BytesIO(response.content), + sep="\s+", + encoding="GB2312", + dtype=str, + parse_dates=["上市日期"], + date_format="%Y-m-d", + on_bad_lines="skip", + ) + print(df) + if df is not None: + df = df.loc[:, ["公司代码", "公司简称", "上市日期"]] + + elif exchange == "sz": + df = pd.read_excel( + io.BytesIO(response.content), + sheet_name="A股列表", + dtype=str, + parse_dates=["A股上市日期"], + date_format="%Y-m-d", + ) + if df is not None: + df = df.loc[:, ["A股代码", "A股简称", "A股上市日期"]] + + if df is not None: + df.columns = ["code", "name", "list_date"] + + df = df.dropna(subset=["code"]) + + # handle the dirty data + # 600996,贵广网络,2016-12-26,2016-12-26,sh,stock,stock_sh_600996,,次新股,贵州,, + df.loc[df["code"] == "600996", "list_date"] = "2016-12-26" + print(df[df["list_date"] == "-"]) + print(df["list_date"]) + df["list_date"] = df["list_date"].apply(lambda x: to_pd_timestamp(x)) + 
df["exchange"] = exchange + df["entity_type"] = "stock" + df["id"] = df[["entity_type", "exchange", "code"]].apply(lambda x: "_".join(x.astype(str)), axis=1) + df["entity_id"] = df["id"] + df["timestamp"] = df["list_date"] + df = df.dropna(axis=0, how="any") + df = df.drop_duplicates(subset=("id"), keep="last") + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=False) + # persist StockDetail too + df_to_db(df=df, data_schema=StockDetail, provider=self.provider, force_update=False) + self.logger.info(df.tail()) + self.logger.info("persist stock list successs") + + +__all__ = ["ExchangeStockMetaRecorder"] + +if __name__ == "__main__": + recorder = ExchangeStockMetaRecorder() + recorder.run() + + +# the __all__ is generated +__all__ = ["ExchangeStockMetaRecorder"] diff --git a/src/zvt/recorders/exchange/exchange_stock_summary_recorder.py b/src/zvt/recorders/exchange/exchange_stock_summary_recorder.py new file mode 100644 index 00000000..2d115783 --- /dev/null +++ b/src/zvt/recorders/exchange/exchange_stock_summary_recorder.py @@ -0,0 +1,100 @@ +import demjson3 +import pandas as pd +import requests + +from zvt.contract.recorder import TimestampsDataRecorder +from zvt.domain import Index +from zvt.domain.misc import StockSummary +from zvt.recorders.consts import DEFAULT_SH_SUMMARY_HEADER +from zvt.utils.time_utils import to_time_str +from zvt.utils.utils import to_float + + +class ExchangeStockSummaryRecorder(TimestampsDataRecorder): + entity_provider = "exchange" + entity_schema = Index + + provider = "exchange" + data_schema = StockSummary + + original_page_url = "http://www.sse.com.cn/market/stockdata/overview/day/" + + url = "http://query.sse.com.cn/marketdata/tradedata/queryTradingByProdTypeData.do?jsonCallBack=jsonpCallback30731&searchDate={}&prodType=gp&_=1515717065511" + + def __init__( + self, + force_update=False, + sleeping_time=5, + exchanges=None, + entity_id=None, + entity_ids=None, + code=None, + day_data=False, + 
entity_filters=None, + ignore_failed=True, + real_time=False, + fix_duplicate_way="add", + start_timestamp=None, + end_timestamp=None, + ) -> None: + super().__init__( + force_update, + sleeping_time, + exchanges, + entity_id, + entity_ids, + code, + ["000001"], + day_data, + entity_filters, + ignore_failed, + real_time, + fix_duplicate_way, + start_timestamp, + end_timestamp, + ) + + def init_timestamps(self, entity): + return pd.date_range(start=entity.timestamp, end=pd.Timestamp.now(), freq="B").tolist() + + def record(self, entity, start, end, size, timestamps): + json_results = [] + for timestamp in timestamps: + timestamp_str = to_time_str(timestamp) + url = self.url.format(timestamp_str) + response = requests.get(url=url, headers=DEFAULT_SH_SUMMARY_HEADER) + + results = demjson3.decode(response.text[response.text.index("(") + 1 : response.text.index(")")])["result"] + result = [result for result in results if result["productType"] == "1"] + if result and len(result) == 1: + result_json = result[0] + # 有些较老的数据不存在,默认设为0.0 + json_results.append( + { + "provider": "exchange", + "timestamp": timestamp, + "name": "上证指数", + "pe": to_float(result_json["profitRate"], 0.0), + "total_value": to_float(result_json["marketValue1"] + "亿", 0.0), + "total_tradable_vaule": to_float(result_json["negotiableValue1"] + "亿", 0.0), + "volume": to_float(result_json["trdVol1"] + "万", 0.0), + "turnover": to_float(result_json["trdAmt1"] + "亿", 0.0), + "turnover_rate": to_float(result_json["exchangeRate"], 0.0), + } + ) + + if len(json_results) > 30: + return json_results + + return json_results + + def get_data_map(self): + return None + + +if __name__ == "__main__": + ExchangeStockSummaryRecorder().run() + + +# the __all__ is generated +__all__ = ["ExchangeStockSummaryRecorder"] diff --git a/zvt/recorders/joinquant/__init__.py b/src/zvt/recorders/joinquant/__init__.py similarity index 96% rename from zvt/recorders/joinquant/__init__.py rename to src/zvt/recorders/joinquant/__init__.py 
index 969efda5..6854b560 100644 --- a/zvt/recorders/joinquant/__init__.py +++ b/src/zvt/recorders/joinquant/__init__.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- + + # the __all__ is generated __all__ = [] @@ -6,32 +8,38 @@ # common code of the package # export interface in __all__ which contains __all__ of its sub modules +# import all from submodule overall +from .overall import * +from .overall import __all__ as _overall_all + +__all__ += _overall_all + +# import all from submodule fundamental +from .fundamental import * +from .fundamental import __all__ as _fundamental_all + +__all__ += _fundamental_all + # import all from submodule misc from .misc import * from .misc import __all__ as _misc_all + __all__ += _misc_all # import all from submodule quotes from .quotes import * from .quotes import __all__ as _quotes_all -__all__ += _quotes_all - -# import all from submodule meta -from .meta import * -from .meta import __all__ as _meta_all -__all__ += _meta_all -# import all from submodule fundamental -from .fundamental import * -from .fundamental import __all__ as _fundamental_all -__all__ += _fundamental_all +__all__ += _quotes_all # import all from submodule common from .common import * from .common import __all__ as _common_all + __all__ += _common_all -# import all from submodule overall -from .overall import * -from .overall import __all__ as _overall_all -__all__ += _overall_all \ No newline at end of file +# import all from submodule meta +from .meta import * +from .meta import __all__ as _meta_all + +__all__ += _meta_all diff --git a/zvt/recorders/joinquant/common.py b/src/zvt/recorders/joinquant/common.py similarity index 50% rename from zvt/recorders/joinquant/common.py rename to src/zvt/recorders/joinquant/common.py index 5eb818f4..bea9fda5 100644 --- a/zvt/recorders/joinquant/common.py +++ b/src/zvt/recorders/joinquant/common.py @@ -8,54 +8,54 @@ def to_jq_trading_level(trading_level: IntervalLevel): return trading_level.value if trading_level == 
IntervalLevel.LEVEL_1HOUR: - return '60m' + return "60m" if trading_level == IntervalLevel.LEVEL_4HOUR: - return '240m' + return "240m" if trading_level == IntervalLevel.LEVEL_1DAY: - return '1d' + return "1d" if trading_level == IntervalLevel.LEVEL_1WEEK: - return '1w' + return "1w" if trading_level == IntervalLevel.LEVEL_1MON: - return '1M' + return "1M" def to_jq_entity_id(security_item): - if security_item.entity_type == 'stock' or security_item.entity_type == 'index': - if security_item.exchange == 'sh': - return '{}.XSHG'.format(security_item.code) - if security_item.exchange == 'sz': - return '{}.XSHE'.format(security_item.code) + if security_item.entity_type == "stock" or security_item.entity_type == "index": + if security_item.exchange == "sh": + return "{}.XSHG".format(security_item.code) + if security_item.exchange == "sz": + return "{}.XSHE".format(security_item.code) def to_entity_id(jq_code: str, entity_type): try: - code, exchange = jq_code.split('.') - if exchange == 'XSHG': - exchange = 'sh' - elif exchange == 'XSHE': - exchange = 'sz' + code, exchange = jq_code.split(".") + if exchange == "XSHG": + exchange = "sh" + elif exchange == "XSHE": + exchange = "sz" except: code = jq_code - exchange = 'sz' + exchange = "sz" - return f'{entity_type}_{exchange}_{code}' + return f"{entity_type}_{exchange}_{code}" def jq_to_report_period(jq_report_type): - if jq_report_type == '第一季度': + if jq_report_type == "第一季度": return ReportPeriod.season1.value - if jq_report_type == '第二季度': + if jq_report_type == "第二季度": return ReportPeriod.season2.value - if jq_report_type == '第三季度': + if jq_report_type == "第三季度": return ReportPeriod.season3.value - if jq_report_type == '第四季度': + if jq_report_type == "第四季度": return ReportPeriod.season4.value - if jq_report_type == '半年度': + if jq_report_type == "半年度": return ReportPeriod.half_year.value - if jq_report_type == '年度': + if jq_report_type == "年度": return ReportPeriod.year.value assert False # the __all__ is generated 
-__all__ = ['to_jq_trading_level', 'to_jq_entity_id', 'to_entity_id', 'jq_to_report_period'] \ No newline at end of file +__all__ = ["to_jq_trading_level", "to_jq_entity_id", "to_entity_id", "jq_to_report_period"] diff --git a/src/zvt/recorders/joinquant/fundamental/__init__.py b/src/zvt/recorders/joinquant/fundamental/__init__.py new file mode 100644 index 00000000..042135c4 --- /dev/null +++ b/src/zvt/recorders/joinquant/fundamental/__init__.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule jq_margin_trading_recorder +from .jq_margin_trading_recorder import * +from .jq_margin_trading_recorder import __all__ as _jq_margin_trading_recorder_all + +__all__ += _jq_margin_trading_recorder_all + +# import all from submodule jq_stock_valuation_recorder +from .jq_stock_valuation_recorder import * +from .jq_stock_valuation_recorder import __all__ as _jq_stock_valuation_recorder_all + +__all__ += _jq_stock_valuation_recorder_all + +# import all from submodule jq_etf_valuation_recorder +from .jq_etf_valuation_recorder import * +from .jq_etf_valuation_recorder import __all__ as _jq_etf_valuation_recorder_all + +__all__ += _jq_etf_valuation_recorder_all diff --git a/zvt/recorders/joinquant/fundamental/etf_valuation_recorder.py b/src/zvt/recorders/joinquant/fundamental/jq_etf_valuation_recorder.py similarity index 53% rename from zvt/recorders/joinquant/fundamental/etf_valuation_recorder.py rename to src/zvt/recorders/joinquant/fundamental/jq_etf_valuation_recorder.py index 87095061..8e9ebcb4 100644 --- a/zvt/recorders/joinquant/fundamental/etf_valuation_recorder.py +++ b/src/zvt/recorders/joinquant/fundamental/jq_etf_valuation_recorder.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import pandas as pd -from zvt.api.quote import get_etf_stocks +from zvt.api.portfolio import 
get_etf_stocks from zvt.contract.api import df_to_db from zvt.contract.recorder import TimeSeriesDataRecorder from zvt.domain import StockValuation, Etf, EtfValuation @@ -10,64 +10,63 @@ class JqChinaEtfValuationRecorder(TimeSeriesDataRecorder): - entity_provider = 'joinquant' + entity_provider = "joinquant" entity_schema = Etf # 数据来自jq - provider = 'joinquant' + provider = "joinquant" data_schema = EtfValuation - def __init__(self, entity_type='etf', exchanges=None, entity_ids=None, codes=None, day_data=True, batch_size=10, - force_update=False, sleeping_time=5, default_size=2000, real_time=False, fix_duplicate_way='add', - start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0) -> None: - super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute) - def record(self, entity, start, end, size, timestamps): if not end: end = now_pd_timestamp() - date_range = pd.date_range(start=start, end=end, freq='1D').tolist() + date_range = pd.date_range(start=start, end=end, freq="1D").tolist() for date in date_range: # etf包含的个股和比例 etf_stock_df = get_etf_stocks(code=entity.code, timestamp=date, provider=self.provider) if pd_is_not_null(etf_stock_df): - all_pct = etf_stock_df['proportion'].sum() + all_pct = etf_stock_df["proportion"].sum() if all_pct >= 1.2 or all_pct <= 0.8: - self.logger.error(f'ignore etf:{entity.id} date:{date} proportion sum:{all_pct}') + self.logger.error(f"ignore etf:{entity.id} date:{date} proportion sum:{all_pct}") break - etf_stock_df.set_index('stock_id', inplace=True) + etf_stock_df.set_index("stock_id", inplace=True) # 个股的估值数据 - stock_valuation_df = StockValuation.query_data(entity_ids=etf_stock_df.index.to_list(), - filters=[StockValuation.timestamp == date], - index='entity_id') + stock_valuation_df = StockValuation.query_data( + entity_ids=etf_stock_df.index.to_list(), + 
filters=[StockValuation.timestamp == date], + index="entity_id", + ) if pd_is_not_null(stock_valuation_df): stock_count = len(etf_stock_df) valuation_count = len(stock_valuation_df) self.logger.info( - f'etf:{entity.id} date:{date} stock count: {stock_count},' - f'valuation count:{valuation_count}') + f"etf:{entity.id} date:{date} stock count: {stock_count}," f"valuation count:{valuation_count}" + ) pct = abs(stock_count - valuation_count) / stock_count if pct >= 0.2: - self.logger.error(f'ignore etf:{entity.id} date:{date} pct:{pct}') + self.logger.error(f"ignore etf:{entity.id} date:{date} pct:{pct}") break - se = pd.Series({'id': "{}_{}".format(entity.id, date), - 'entity_id': entity.id, - 'timestamp': date, - 'code': entity.code, - 'name': entity.name}) - for col in ['pe', 'pe_ttm', 'pb', 'ps', 'pcf']: + se = pd.Series( + { + "id": "{}_{}".format(entity.id, date), + "entity_id": entity.id, + "timestamp": date, + "code": entity.code, + "name": entity.name, + } + ) + for col in ["pe", "pe_ttm", "pb", "ps", "pcf"]: # PE=P/E # 这里的算法为:将其价格都设为PE,那么Earning为1(亏钱为-1),结果为 总价格(PE)/总Earning @@ -76,20 +75,20 @@ def record(self, entity, start, end, size, timestamps): # 权重估值 positive_df = stock_valuation_df[[col]][stock_valuation_df[col] > 0] - positive_df['count'] = 1 + positive_df["count"] = 1 positive_df = positive_df.multiply(etf_stock_df["proportion"], axis="index") if pd_is_not_null(positive_df): - value = positive_df['count'].sum() + value = positive_df["count"].sum() price = positive_df[col].sum() negative_df = stock_valuation_df[[col]][stock_valuation_df[col] < 0] if pd_is_not_null(negative_df): - negative_df['count'] = 1 + negative_df["count"] = 1 negative_df = negative_df.multiply(etf_stock_df["proportion"], axis="index") - value = value - negative_df['count'].sum() + value = value - negative_df["count"].sum() price = price + negative_df[col].sum() - se[f'{col}1'] = price / value + se[f"{col}1"] = price / value # 简单算术平均估值 positive_df = 
stock_valuation_df[col][stock_valuation_df[col] > 0] @@ -106,14 +105,17 @@ def record(self, entity, start, end, size, timestamps): self.logger.info(df) - df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, - force_update=self.force_update) + df_to_db( + df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update + ) return None -if __name__ == '__main__': +if __name__ == "__main__": # 上证50 - JqChinaEtfValuationRecorder(codes=['512290']).run() + JqChinaEtfValuationRecorder(codes=["512290"]).run() + + # the __all__ is generated -__all__ = ['JqChinaEtfValuationRecorder'] \ No newline at end of file +__all__ = ["JqChinaEtfValuationRecorder"] diff --git a/src/zvt/recorders/joinquant/fundamental/jq_margin_trading_recorder.py b/src/zvt/recorders/joinquant/fundamental/jq_margin_trading_recorder.py new file mode 100644 index 00000000..40f41a5f --- /dev/null +++ b/src/zvt/recorders/joinquant/fundamental/jq_margin_trading_recorder.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- + +import pandas as pd +from jqdatapy.api import get_mtss + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import TimeSeriesDataRecorder +from zvt.domain import Stock, MarginTrading +from zvt.recorders.joinquant.common import to_jq_entity_id +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import to_time_str, TIME_FORMAT_DAY + + +class MarginTradingRecorder(TimeSeriesDataRecorder): + entity_provider = "joinquant" + entity_schema = Stock + + # 数据来自jq + provider = "joinquant" + + data_schema = MarginTrading + + def record(self, entity, start, end, size, timestamps): + df = get_mtss(code=to_jq_entity_id(entity), date=to_time_str(start)) + + if pd_is_not_null(df): + df["entity_id"] = entity.id + df["code"] = entity.code + df.rename(columns={"date": "timestamp"}, inplace=True) + df["timestamp"] = pd.to_datetime(df["timestamp"]) + df["id"] = df[["entity_id", "timestamp"]].apply( + lambda se: 
"{}_{}".format(se["entity_id"], to_time_str(se["timestamp"], fmt=TIME_FORMAT_DAY)), axis=1 + ) + + print(df) + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + return None + + +if __name__ == "__main__": + MarginTradingRecorder(codes=["000004"]).run() + + +# the __all__ is generated +__all__ = ["MarginTradingRecorder"] diff --git a/src/zvt/recorders/joinquant/fundamental/jq_stock_valuation_recorder.py b/src/zvt/recorders/joinquant/fundamental/jq_stock_valuation_recorder.py new file mode 100644 index 00000000..7c5a8212 --- /dev/null +++ b/src/zvt/recorders/joinquant/fundamental/jq_stock_valuation_recorder.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- + +import pandas as pd +from jqdatapy.api import get_fundamentals +from pandas._libs.tslibs.timedeltas import Timedelta + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import TimeSeriesDataRecorder +from zvt.domain import Stock, StockValuation, Etf +from zvt.recorders.joinquant.common import to_jq_entity_id +from zvt.utils.time_utils import now_pd_timestamp, to_time_str, to_pd_timestamp + + +class JqChinaStockValuationRecorder(TimeSeriesDataRecorder): + entity_provider = "joinquant" + entity_schema = Stock + + # 数据来自jq + provider = "joinquant" + + data_schema = StockValuation + + def record(self, entity, start, end, size, timestamps): + start = max(start, to_pd_timestamp("2005-01-01")) + end = min(now_pd_timestamp(), start + Timedelta(days=500)) + + count: Timedelta = end - start + + # df = get_fundamentals_continuously(q, end_date=now_time_str(), count=count.days + 1, panel=False) + df = get_fundamentals( + table="valuation", code=to_jq_entity_id(entity), date=to_time_str(end), count=min(count.days, 500) + ) + df["entity_id"] = entity.id + df["timestamp"] = pd.to_datetime(df["day"]) + df["code"] = entity.code + df["name"] = entity.name + df["id"] = df["timestamp"].apply(lambda x: "{}_{}".format(entity.id, to_time_str(x))) + df = df.rename( + 
{"pe_ratio_lyr": "pe", "pe_ratio": "pe_ttm", "pb_ratio": "pb", "ps_ratio": "ps", "pcf_ratio": "pcf"}, + axis="columns", + ) + + df["market_cap"] = df["market_cap"] * 100000000 + df["circulating_market_cap"] = df["circulating_market_cap"] * 100000000 + df["capitalization"] = df["capitalization"] * 10000 + df["circulating_cap"] = df["circulating_cap"] * 10000 + df["turnover_ratio"] = df["turnover_ratio"] * 0.01 + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + return None + + +if __name__ == "__main__": + # 上证50 + df = Etf.get_stocks(code="510050") + stocks = df.stock_id.tolist() + print(stocks) + print(len(stocks)) + + JqChinaStockValuationRecorder(entity_ids=["stock_sz_300999"], force_update=True).run() + + +# the __all__ is generated +__all__ = ["JqChinaStockValuationRecorder"] diff --git a/src/zvt/recorders/joinquant/meta/__init__.py b/src/zvt/recorders/joinquant/meta/__init__.py new file mode 100644 index 00000000..d4565725 --- /dev/null +++ b/src/zvt/recorders/joinquant/meta/__init__.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule jq_fund_meta_recorder +from .jq_fund_meta_recorder import * +from .jq_fund_meta_recorder import __all__ as _jq_fund_meta_recorder_all + +__all__ += _jq_fund_meta_recorder_all + +# import all from submodule jq_stock_meta_recorder +from .jq_stock_meta_recorder import * +from .jq_stock_meta_recorder import __all__ as _jq_stock_meta_recorder_all + +__all__ += _jq_stock_meta_recorder_all + +# import all from submodule jq_trade_day_recorder +from .jq_trade_day_recorder import * +from .jq_trade_day_recorder import __all__ as _jq_trade_day_recorder_all + +__all__ += _jq_trade_day_recorder_all diff --git a/src/zvt/recorders/joinquant/meta/jq_fund_meta_recorder.py 
b/src/zvt/recorders/joinquant/meta/jq_fund_meta_recorder.py new file mode 100644 index 00000000..f88844f0 --- /dev/null +++ b/src/zvt/recorders/joinquant/meta/jq_fund_meta_recorder.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- +import pandas as pd +from jqdatapy.api import run_query + +from zvt.api.portfolio import portfolio_relate_stock +from zvt.api.utils import china_stock_code_to_id +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder, TimeSeriesDataRecorder +from zvt.domain.meta.fund_meta import Fund, FundStock +from zvt.recorders.joinquant.common import to_entity_id, jq_to_report_period +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import to_time_str, date_time_by_interval, now_pd_timestamp, is_same_date + + +class JqChinaFundRecorder(Recorder): + provider = "joinquant" + data_schema = Fund + + def run(self): + # 按不同类别抓取 + # 编码 基金运作方式 + # 401001 开放式基金 + # 401002 封闭式基金 + # 401003 QDII + # 401004 FOF + # 401005 ETF + # 401006 LOF + for operate_mode_id in (401001, 401002, 401005): + year_count = 2 + while True: + latest = Fund.query_data( + filters=[Fund.operate_mode_id == operate_mode_id], + order=Fund.timestamp.desc(), + limit=1, + return_type="domain", + ) + start_timestamp = "2000-01-01" + if latest: + start_timestamp = latest[0].timestamp + + end_timestamp = min(date_time_by_interval(start_timestamp, 365 * year_count), now_pd_timestamp()) + + df = run_query( + table="finance.FUND_MAIN_INFO", + conditions=f"operate_mode_id#=#{operate_mode_id}&start_date#>=#{to_time_str(start_timestamp)}&start_date#<=#{to_time_str(end_timestamp)}", + parse_dates=["start_date", "end_date"], + dtype={"main_code": str}, + ) + if not pd_is_not_null(df) or (df["start_date"].max().year < end_timestamp.year): + year_count = year_count + 1 + + if pd_is_not_null(df): + df.rename(columns={"start_date": "timestamp"}, inplace=True) + df["timestamp"] = pd.to_datetime(df["timestamp"]) + df["list_date"] = df["timestamp"] + 
df["end_date"] = pd.to_datetime(df["end_date"]) + + df["code"] = df["main_code"] + df["entity_id"] = df["code"].apply(lambda x: to_entity_id(entity_type="fund", jq_code=x)) + df["id"] = df["entity_id"] + df["entity_type"] = "fund" + df["exchange"] = "sz" + df_to_db(df, data_schema=Fund, provider=self.provider, force_update=self.force_update) + self.logger.info( + f"persist fund {operate_mode_id} list success {start_timestamp} to {end_timestamp}" + ) + + if is_same_date(end_timestamp, now_pd_timestamp()): + break + + +class JqChinaFundStockRecorder(TimeSeriesDataRecorder): + entity_provider = "joinquant" + entity_schema = Fund + + provider = "joinquant" + data_schema = FundStock + + def init_entities(self): + # 只抓股票型,混合型并且没退市的持仓, + self.entities = Fund.query_data( + entity_ids=self.entity_ids, + codes=self.codes, + return_type="domain", + provider=self.entity_provider, + filters=[Fund.underlying_asset_type.in_(("股票型", "混合型")), Fund.end_date.is_(None)], + ) + + def record(self, entity, start, end, size, timestamps): + # 忽略退市的 + if entity.end_date: + return None + redundant_times = 1 + while redundant_times > 0: + df = run_query( + table="finance.FUND_PORTFOLIO_STOCK", + conditions=f"pub_date#>=#{to_time_str(start)}&code#=#{entity.code}", + parse_dates=None, + ) + df = df.dropna() + if pd_is_not_null(df): + # data format + # id code period_start period_end pub_date report_type_id report_type rank symbol name shares market_cap proportion + # 0 8640569 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 1 601318 中国平安 19869239.0 1.361043e+09 7.09 + # 1 8640570 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 2 600519 贵州茅台 921670.0 6.728191e+08 3.50 + # 2 8640571 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 3 600036 招商银行 18918815.0 5.806184e+08 3.02 + # 3 8640572 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 4 601166 兴业银行 22862332.0 3.646542e+08 1.90 + df["timestamp"] = pd.to_datetime(df["pub_date"]) + + df.rename(columns={"symbol": "stock_code", "name": 
"stock_name"}, inplace=True) + df["proportion"] = df["proportion"] * 0.01 + + df = portfolio_relate_stock(df, entity) + + df["stock_id"] = df["stock_code"].apply(lambda x: china_stock_code_to_id(x)) + df["id"] = df[["entity_id", "stock_id", "pub_date", "id"]].apply( + lambda x: "_".join(x.astype(str)), axis=1 + ) + df["report_date"] = pd.to_datetime(df["period_end"]) + df["report_period"] = df["report_type"].apply(lambda x: jq_to_report_period(x)) + + saved = df_to_db( + df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update + ) + + # 取不到非重复的数据 + if saved == 0: + return None + + # self.logger.info(df.tail()) + self.logger.info( + f"persist fund {entity.code}({entity.name}) portfolio success {df.iloc[-1]['pub_date']}" + ) + latest = df["timestamp"].max() + + # 取到了最近两年的数据,再请求一次,确保取完最新的数据 + if latest.year >= now_pd_timestamp().year - 1: + redundant_times = redundant_times - 1 + start = latest + else: + return None + + return None + + +if __name__ == "__main__": + # JqChinaFundRecorder().run() + JqChinaFundStockRecorder(codes=["000053"]).run() + + +# the __all__ is generated +__all__ = ["JqChinaFundRecorder", "JqChinaFundStockRecorder"] diff --git a/zvt/recorders/joinquant/meta/china_stock_meta_recorder.py b/src/zvt/recorders/joinquant/meta/jq_stock_meta_recorder.py similarity index 53% rename from zvt/recorders/joinquant/meta/china_stock_meta_recorder.py rename to src/zvt/recorders/joinquant/meta/jq_stock_meta_recorder.py index fa4a0c43..e9a5f1ec 100644 --- a/zvt/recorders/joinquant/meta/china_stock_meta_recorder.py +++ b/src/zvt/recorders/joinquant/meta/jq_stock_meta_recorder.py @@ -2,7 +2,8 @@ import pandas as pd from jqdatapy.api import get_all_securities, run_query -from zvt.api.quote import china_stock_code_to_id, portfolio_relate_stock +from zvt.api.portfolio import portfolio_relate_stock +from zvt.api.utils import china_stock_code_to_id from zvt.contract.api import df_to_db, get_entity_exchange, get_entity_code from 
zvt.contract.recorder import Recorder, TimeSeriesDataRecorder from zvt.domain import EtfStock, Stock, Etf, StockDetail @@ -12,30 +13,30 @@ class BaseJqChinaMetaRecorder(Recorder): - provider = 'joinquant' + provider = "joinquant" - def __init__(self, batch_size=10, force_update=True, sleeping_time=10) -> None: - super().__init__(batch_size, force_update, sleeping_time) + def __init__(self, force_update=True, sleeping_time=10) -> None: + super().__init__(force_update, sleeping_time) def to_zvt_entity(self, df, entity_type, category=None): - df = df.set_index('code') - df.index.name = 'entity_id' + df = df.set_index("code") + df.index.name = "entity_id" df = df.reset_index() # 上市日期 - df.rename(columns={'start_date': 'timestamp'}, inplace=True) - df['timestamp'] = pd.to_datetime(df['timestamp']) - df['list_date'] = df['timestamp'] - df['end_date'] = pd.to_datetime(df['end_date']) - - df['entity_id'] = df['entity_id'].apply(lambda x: to_entity_id(entity_type=entity_type, jq_code=x)) - df['id'] = df['entity_id'] - df['entity_type'] = entity_type - df['exchange'] = df['entity_id'].apply(lambda x: get_entity_exchange(x)) - df['code'] = df['entity_id'].apply(lambda x: get_entity_code(x)) - df['name'] = df['display_name'] + df.rename(columns={"start_date": "timestamp"}, inplace=True) + df["timestamp"] = pd.to_datetime(df["timestamp"]) + df["list_date"] = df["timestamp"] + df["end_date"] = pd.to_datetime(df["end_date"]) + + df["entity_id"] = df["entity_id"].apply(lambda x: to_entity_id(entity_type=entity_type, jq_code=x)) + df["id"] = df["entity_id"] + df["entity_type"] = entity_type + df["exchange"] = df["entity_id"].apply(lambda x: get_entity_exchange(x)) + df["code"] = df["entity_id"].apply(lambda x: get_entity_code(x)) + df["name"] = df["display_name"] if category: - df['category'] = category + df["category"] = category return df @@ -45,7 +46,7 @@ class JqChinaStockRecorder(BaseJqChinaMetaRecorder): def run(self): # 抓取股票列表 - df_stock = 
self.to_zvt_entity(get_all_securities(code='stock'), entity_type='stock') + df_stock = self.to_zvt_entity(get_all_securities(code="stock"), entity_type="stock") df_to_db(df_stock, data_schema=Stock, provider=self.provider, force_update=self.force_update) # persist StockDetail too df_to_db(df=df_stock, data_schema=StockDetail, provider=self.provider, force_update=self.force_update) @@ -59,7 +60,7 @@ class JqChinaEtfRecorder(BaseJqChinaMetaRecorder): def run(self): # 抓取etf列表 - df_index = self.to_zvt_entity(get_all_securities(code='etf'), entity_type='etf', category='etf') + df_index = self.to_zvt_entity(get_all_securities(code="etf"), entity_type="etf", category="etf") df_to_db(df_index, data_schema=Etf, provider=self.provider, force_update=self.force_update) # self.logger.info(df_index) @@ -67,42 +68,37 @@ def run(self): class JqChinaStockEtfPortfolioRecorder(TimeSeriesDataRecorder): - entity_provider = 'joinquant' + entity_provider = "joinquant" entity_schema = Etf # 数据来自jq - provider = 'joinquant' + provider = "joinquant" data_schema = EtfStock - def __init__(self, entity_type='etf', exchanges=['sh', 'sz'], entity_ids=None, codes=None, day_data=True, batch_size=10, - force_update=False, sleeping_time=5, default_size=2000, real_time=False, fix_duplicate_way='add', - start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0) -> None: - super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute) - def record(self, entity, start, end, size, timestamps): - df = run_query(table='finance.FUND_PORTFOLIO_STOCK', - conditions=f'pub_date#>=#{to_time_str(start)}&code#=#{entity.code}', - parse_dates=None) + df = run_query( + table="finance.FUND_PORTFOLIO_STOCK", + conditions=f"pub_date#>=#{to_time_str(start)}&code#=#{entity.code}", + parse_dates=None, + ) if pd_is_not_null(df): # id code period_start 
period_end pub_date report_type_id report_type rank symbol name shares market_cap proportion # 0 8640569 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 1 601318 中国平安 19869239.0 1.361043e+09 7.09 # 1 8640570 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 2 600519 贵州茅台 921670.0 6.728191e+08 3.50 # 2 8640571 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 3 600036 招商银行 18918815.0 5.806184e+08 3.02 # 3 8640572 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 4 601166 兴业银行 22862332.0 3.646542e+08 1.90 - df['timestamp'] = pd.to_datetime(df['pub_date']) + df["timestamp"] = pd.to_datetime(df["pub_date"]) - df.rename(columns={'symbol': 'stock_code', 'name': 'stock_name'}, inplace=True) - df['proportion'] = df['proportion'] * 0.01 + df.rename(columns={"symbol": "stock_code", "name": "stock_name"}, inplace=True) + df["proportion"] = df["proportion"] * 0.01 df = portfolio_relate_stock(df, entity) - df['stock_id'] = df['stock_code'].apply(lambda x: china_stock_code_to_id(x)) - df['id'] = df[['entity_id', 'stock_id', 'pub_date', 'id']].apply(lambda x: '_'.join(x.astype(str)), axis=1) - df['report_date'] = pd.to_datetime(df['period_end']) - df['report_period'] = df['report_type'].apply(lambda x: jq_to_report_period(x)) + df["stock_id"] = df["stock_code"].apply(lambda x: china_stock_code_to_id(x)) + df["id"] = df[["entity_id", "stock_id", "pub_date", "id"]].apply(lambda x: "_".join(x.astype(str)), axis=1) + df["report_date"] = pd.to_datetime(df["period_end"]) + df["report_period"] = df["report_type"].apply(lambda x: jq_to_report_period(x)) df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) @@ -112,8 +108,10 @@ def record(self, entity, start, end, size, timestamps): return None -if __name__ == '__main__': +if __name__ == "__main__": # JqChinaEtfRecorder().run() - JqChinaStockEtfPortfolioRecorder(codes=['510050']).run() + JqChinaStockEtfPortfolioRecorder(codes=["510050"]).run() + + # the __all__ is generated 
-__all__ = ['BaseJqChinaMetaRecorder', 'JqChinaStockRecorder', 'JqChinaEtfRecorder', 'JqChinaStockEtfPortfolioRecorder'] \ No newline at end of file +__all__ = ["BaseJqChinaMetaRecorder", "JqChinaStockRecorder", "JqChinaEtfRecorder", "JqChinaStockEtfPortfolioRecorder"] diff --git a/src/zvt/recorders/joinquant/meta/jq_trade_day_recorder.py b/src/zvt/recorders/joinquant/meta/jq_trade_day_recorder.py new file mode 100644 index 00000000..c6c528be --- /dev/null +++ b/src/zvt/recorders/joinquant/meta/jq_trade_day_recorder.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +import pandas as pd +from jqdatapy.api import get_trade_days + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import TimeSeriesDataRecorder +from zvt.domain import StockTradeDay, Stock +from zvt.utils.time_utils import to_time_str + + +class StockTradeDayRecorder(TimeSeriesDataRecorder): + entity_provider = "joinquant" + entity_schema = Stock + + provider = "joinquant" + data_schema = StockTradeDay + + def __init__( + self, + exchanges=None, + entity_id=None, + entity_ids=None, + day_data=False, + force_update=False, + sleeping_time=5, + real_time=False, + fix_duplicate_way="add", + start_timestamp=None, + end_timestamp=None, + entity_filters=None, + ) -> None: + super().__init__( + force_update, + sleeping_time, + exchanges, + entity_id, + entity_ids, + codes=["000001"], + day_data=day_data, + entity_filters=entity_filters, + ignore_failed=True, + real_time=real_time, + fix_duplicate_way=fix_duplicate_way, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + ) + + def record(self, entity, start, end, size, timestamps): + df = pd.DataFrame() + dates = get_trade_days(date=to_time_str(start)) + dates = dates.iloc[:, 0] + self.logger.info(f"add dates:{dates}") + df["timestamp"] = pd.to_datetime(dates) + df["id"] = [to_time_str(date) for date in dates] + df["entity_id"] = "stock_sz_000001" + + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, 
force_update=self.force_update) + + +if __name__ == "__main__": + r = StockTradeDayRecorder() + r.run() + + +# the __all__ is generated +__all__ = ["StockTradeDayRecorder"] diff --git a/src/zvt/recorders/joinquant/misc/__init__.py b/src/zvt/recorders/joinquant/misc/__init__.py new file mode 100644 index 00000000..b0fb5705 --- /dev/null +++ b/src/zvt/recorders/joinquant/misc/__init__.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule jq_hk_holder_recorder +from .jq_hk_holder_recorder import * +from .jq_hk_holder_recorder import __all__ as _jq_hk_holder_recorder_all + +__all__ += _jq_hk_holder_recorder_all + +# import all from submodule jq_index_money_flow_recorder +from .jq_index_money_flow_recorder import * +from .jq_index_money_flow_recorder import __all__ as _jq_index_money_flow_recorder_all + +__all__ += _jq_index_money_flow_recorder_all + +# import all from submodule jq_stock_money_flow_recorder +from .jq_stock_money_flow_recorder import * +from .jq_stock_money_flow_recorder import __all__ as _jq_stock_money_flow_recorder_all + +__all__ += _jq_stock_money_flow_recorder_all diff --git a/src/zvt/recorders/joinquant/misc/jq_hk_holder_recorder.py b/src/zvt/recorders/joinquant/misc/jq_hk_holder_recorder.py new file mode 100644 index 00000000..536d95ec --- /dev/null +++ b/src/zvt/recorders/joinquant/misc/jq_hk_holder_recorder.py @@ -0,0 +1,110 @@ +import pandas as pd +from jqdatapy.api import run_query + +from zvt.contract.api import df_to_db, get_data +from zvt.contract.recorder import TimestampsDataRecorder +from zvt.domain import Index +from zvt.domain.misc.holder import HkHolder +from zvt.recorders.joinquant.common import to_entity_id +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import to_time_str, TIME_FORMAT_DAY, to_pd_timestamp + + 
+# 这里选择继承TimestampsDataRecorder是因为 +# 1)时间上就是交易日的列表,这个是可知的,可以以此为增量计算点 +# 2)HkHolder数据结构的设计: +# 沪股通/深股通 每日 持有 标的(股票)的情况 +# 抓取的角度是entity从Index中获取 沪股通/深股通,然后按 每日 去获取 + + +class JoinquantHkHolderRecorder(TimestampsDataRecorder): + entity_provider = "exchange" + entity_schema = Index + + provider = "joinquant" + data_schema = HkHolder + + def __init__( + self, + day_data=False, + force_update=False, + sleeping_time=5, + real_time=False, + start_timestamp=None, + end_timestamp=None, + ) -> None: + # 聚宽编码 + # 市场通编码 市场通名称 + # 310001 沪股通 + # 310002 深股通 + # 310003 港股通(沪) + # 310004 港股通(深) + codes = ["310001", "310002"] + + super().__init__( + force_update, + sleeping_time, + ["cn"], + None, + codes, + day_data, + real_time=real_time, + fix_duplicate_way="ignore", + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + ) + + def init_timestamps(self, entity): + # 聚宽数据从2017年3月17开始 + return pd.date_range(start=to_pd_timestamp("2017-3-17"), end=pd.Timestamp.now(), freq="B").tolist() + + # 覆盖这个方式是因为,HkHolder里面entity其实是股票,而recorder中entity是 Index类型(沪股通/深股通) + def get_latest_saved_record(self, entity): + order = eval("self.data_schema.{}.desc()".format(self.get_evaluated_time_field())) + + records = get_data( + filters=[HkHolder.holder_code == entity.code], + provider=self.provider, + data_schema=self.data_schema, + order=order, + limit=1, + return_type="domain", + session=self.session, + ) + if records: + return records[0] + return None + + def record(self, entity, start, end, size, timestamps): + for timestamp in timestamps: + df = run_query( + table="finance.STK_HK_HOLD_INFO", conditions=f"link_id#=#{entity.code}&day#=#{to_time_str(timestamp)}" + ) + print(df) + + if pd_is_not_null(df): + df.rename( + columns={"day": "timestamp", "link_id": "holder_code", "link_name": "holder_name"}, inplace=True + ) + df["timestamp"] = pd.to_datetime(df["timestamp"]) + + df["entity_id"] = df["code"].apply(lambda x: to_entity_id(entity_type="stock", jq_code=x)) + df["code"] = 
df["code"].apply(lambda x: x.split(".")[0]) + + # id格式为:{holder_name}_{entity_id}_{timestamp} + df["id"] = df[["holder_name", "entity_id", "timestamp"]].apply( + lambda se: "{}_{}_{}".format( + se["holder_name"], se["entity_id"], to_time_str(se["timestamp"], fmt=TIME_FORMAT_DAY) + ), + axis=1, + ) + + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +if __name__ == "__main__": + JoinquantHkHolderRecorder(sleeping_time=10).run() + + +# the __all__ is generated +__all__ = ["JoinquantHkHolderRecorder"] diff --git a/src/zvt/recorders/joinquant/misc/jq_index_money_flow_recorder.py b/src/zvt/recorders/joinquant/misc/jq_index_money_flow_recorder.py new file mode 100644 index 00000000..1483b1ff --- /dev/null +++ b/src/zvt/recorders/joinquant/misc/jq_index_money_flow_recorder.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- +import pandas as pd + +from zvt.contract import IntervalLevel +from zvt.contract.api import df_to_db +from zvt.contract.recorder import FixedCycleDataRecorder +from zvt.domain import IndexMoneyFlow, Index, StockMoneyFlow +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import to_time_str + + +class JoinquantIndexMoneyFlowRecorder(FixedCycleDataRecorder): + entity_provider = "exchange" + entity_schema = Index + + provider = "joinquant" + data_schema = IndexMoneyFlow + + def __init__( + self, + force_update=True, + sleeping_time=10, + exchanges=None, + entity_id=None, + entity_ids=None, + code=None, + codes=None, + day_data=False, + entity_filters=None, + ignore_failed=True, + real_time=False, + fix_duplicate_way="ignore", + start_timestamp=None, + end_timestamp=None, + level=IntervalLevel.LEVEL_1DAY, + kdata_use_begin_time=False, + one_day_trading_minutes=24 * 60, + return_unfinished=False, + ) -> None: + # 上证指数,深证成指,创业板指,科创板 + support_codes = ["000001", "399001", "399006", "000688"] + if not codes: + codes = support_codes + else: + codes = list(set(codes) & 
set(support_codes)) + super().__init__( + force_update, + sleeping_time, + exchanges, + entity_id, + entity_ids, + code, + codes, + day_data, + entity_filters, + ignore_failed, + real_time, + fix_duplicate_way, + start_timestamp, + end_timestamp, + level, + kdata_use_begin_time, + one_day_trading_minutes, + return_unfinished, + ) + + def record(self, entity, start, end, size, timestamps): + # 上证 + if entity.code == "000001": + all_df = StockMoneyFlow.query_data( + provider=self.provider, start_timestamp=start, filters=[StockMoneyFlow.entity_id.like("stock_sh%")] + ) + # 深证 + elif entity.code == "399001": + all_df = StockMoneyFlow.query_data( + provider=self.provider, start_timestamp=start, filters=[StockMoneyFlow.entity_id.like("stock_sz%")] + ) + # 创业板 + elif entity.code == "399006": + all_df = StockMoneyFlow.query_data( + provider=self.provider, start_timestamp=start, filters=[StockMoneyFlow.code.like("300%")] + ) + # 科创板 + elif entity.code == "000688": + all_df = StockMoneyFlow.query_data( + provider=self.provider, start_timestamp=start, filters=[StockMoneyFlow.code.like("688%")] + ) + + if pd_is_not_null(all_df): + g = all_df.groupby("timestamp") + for timestamp, df in g: + se = pd.Series( + { + "id": "{}_{}".format(entity.id, to_time_str(timestamp)), + "entity_id": entity.id, + "timestamp": timestamp, + "code": entity.code, + "name": entity.name, + } + ) + for col in [ + "net_main_inflows", + "net_huge_inflows", + "net_big_inflows", + "net_medium_inflows", + "net_small_inflows", + ]: + se[col] = df[col].sum() + + for col in [ + "net_main_inflow_rate", + "net_huge_inflow_rate", + "net_big_inflow_rate", + "net_medium_inflow_rate", + "net_small_inflow_rate", + ]: + se[col] = df[col].sum() / len(df) + + index_df = se.to_frame().T + + self.logger.info(index_df) + + df_to_db( + df=index_df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update + ) + + return None + + +if __name__ == "__main__": + 
# -*- coding: utf-8 -*-
import pandas as pd
from jqdatapy import get_token, get_money_flow

from zvt import zvt_config
from zvt.api.kdata import generate_kdata_id
from zvt.contract import IntervalLevel
from zvt.contract.api import df_to_db
from zvt.contract.recorder import FixedCycleDataRecorder
from zvt.domain import StockMoneyFlow, Stock
from zvt.recorders.joinquant.common import to_jq_entity_id
from zvt.recorders.joinquant.misc.jq_index_money_flow_recorder import JoinquantIndexMoneyFlowRecorder
from zvt.utils.pd_utils import pd_is_not_null
from zvt.utils.time_utils import TIME_FORMAT_DAY, to_time_str


class JoinquantStockMoneyFlowRecorder(FixedCycleDataRecorder):
    """Records per-stock daily money-flow data from joinquant into StockMoneyFlow."""

    entity_provider = "joinquant"
    entity_schema = Stock

    provider = "joinquant"
    data_schema = StockMoneyFlow

    def __init__(
        self,
        force_update=True,
        sleeping_time=10,
        exchanges=None,
        entity_id=None,
        entity_ids=None,
        code=None,
        codes=None,
        day_data=False,
        entity_filters=None,
        ignore_failed=True,
        real_time=False,
        fix_duplicate_way="ignore",
        start_timestamp=None,
        end_timestamp=None,
        level=IntervalLevel.LEVEL_1DAY,
        kdata_use_begin_time=False,
        one_day_trading_minutes=24 * 60,
        compute_index_money_flow=False,
        return_unfinished=False,
    ) -> None:
        """Forward the standard FixedCycleDataRecorder arguments unchanged.

        :param compute_index_money_flow: when True, recompute the index-level
            money flow (via JoinquantIndexMoneyFlowRecorder) after this
            recorder finishes.
        """
        super().__init__(
            force_update,
            sleeping_time,
            exchanges,
            entity_id,
            entity_ids,
            code,
            codes,
            day_data,
            entity_filters,
            ignore_failed,
            real_time,
            fix_duplicate_way,
            start_timestamp,
            end_timestamp,
            level,
            kdata_use_begin_time,
            one_day_trading_minutes,
            return_unfinished,
        )
        self.compute_index_money_flow = compute_index_money_flow
        # authenticate against the joinquant data service up front
        get_token(zvt_config["jq_username"], zvt_config["jq_password"], force=True)

    def generate_domain_id(self, entity, original_data):
        # one record per entity per level-interval (daily here)
        return generate_kdata_id(entity_id=entity.id, timestamp=original_data["timestamp"], level=self.level)

    def on_finish(self):
        # derive the market-wide (index) money flow from the per-stock records
        if self.compute_index_money_flow:
            JoinquantIndexMoneyFlowRecorder().run()

    def record(self, entity, start, end, size, timestamps):
        """Fetch, normalize and persist money-flow rows for one stock."""
        if not self.end_timestamp:
            df = get_money_flow(code=to_jq_entity_id(entity), date=to_time_str(start))
        else:
            # NOTE(review): start is passed raw here but stringified in the
            # branch above — presumably get_money_flow accepts both; confirm.
            df = get_money_flow(code=to_jq_entity_id(entity), date=start, end_date=to_time_str(self.end_timestamp))

        df = df.dropna()

        if pd_is_not_null(df):
            df["name"] = entity.name
            # map joinquant column names onto the StockMoneyFlow schema
            df.rename(
                columns={
                    "date": "timestamp",
                    "net_amount_main": "net_main_inflows",
                    "net_pct_main": "net_main_inflow_rate",
                    "net_amount_xl": "net_huge_inflows",
                    "net_pct_xl": "net_huge_inflow_rate",
                    "net_amount_l": "net_big_inflows",
                    "net_pct_l": "net_big_inflow_rate",
                    "net_amount_m": "net_medium_inflows",
                    "net_pct_m": "net_medium_inflow_rate",
                    "net_amount_s": "net_small_inflows",
                    "net_pct_s": "net_small_inflow_rate",
                },
                inplace=True,
            )

            # convert inflow amounts to standard floats, dropping rows that
            # fail to parse
            inflows_cols = [
                "net_main_inflows",
                "net_huge_inflows",
                "net_big_inflows",
                "net_medium_inflows",
                "net_small_inflows",
            ]
            for col in inflows_cols:
                df[col] = pd.to_numeric(df[col], errors="coerce")
            df = df.dropna()

            if not pd_is_not_null(df):
                return None

            # scale amounts by 1e4 (source reports 万元; target unit is yuan
            # — presumably; confirm against the StockMoneyFlow schema)
            df[inflows_cols] = df[inflows_cols].apply(lambda x: x * 10000)

            inflow_rate_cols = [
                "net_main_inflow_rate",
                "net_huge_inflow_rate",
                "net_big_inflow_rate",
                "net_medium_inflow_rate",
                "net_small_inflow_rate",
            ]
            for col in inflow_rate_cols:
                df[col] = pd.to_numeric(df[col], errors="coerce")
            df = df.dropna()
            if not pd_is_not_null(df):
                return None

            # convert rates from percentages to fractions
            df[inflow_rate_cols] = df[inflow_rate_cols].apply(lambda x: x / 100)

            # total inflow = sum of the four size buckets
            df["net_inflows"] = (
                df["net_huge_inflows"] + df["net_big_inflows"] + df["net_medium_inflows"] + df["net_small_inflows"]
            )
            # total inflow rate, backing out total amount from main inflow/rate
            amount = df["net_main_inflows"] / df["net_main_inflow_rate"]
            df["net_inflow_rate"] = df["net_inflows"] / amount

            df["entity_id"] = entity.id
            df["timestamp"] = pd.to_datetime(df["timestamp"])
            df["provider"] = "joinquant"
            df["code"] = entity.code

            def generate_kdata_id(se):
                # id is entity_id + day, so one row per stock per day
                return "{}_{}".format(se["entity_id"], to_time_str(se["timestamp"], fmt=TIME_FORMAT_DAY))

            df["id"] = df[["entity_id", "timestamp"]].apply(generate_kdata_id, axis=1)

            df = df.drop_duplicates(subset="id", keep="last")

            df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update)

        return None


if __name__ == "__main__":
    JoinquantStockMoneyFlowRecorder(codes=["000578"]).run()


# the __all__ is generated
__all__ = ["JoinquantStockMoneyFlowRecorder"]
from jqdatapy.api import run_query

from zvt.contract.recorder import TimeSeriesDataRecorder
from zvt.domain import Index, CrossMarketSummary
from zvt.utils.time_utils import to_time_str
from zvt.utils.utils import multiple_number


class CrossMarketSummaryRecorder(TimeSeriesDataRecorder):
    """Records daily cross-market (stock-connect) quota and turnover summaries.

    joinquant link_id codes:
        310001 沪股通 (SH connect)
        310002 深股通 (SZ connect)
        310003 港股通(沪) (HK connect via SH)
        310004 港股通(深) (HK connect via SZ)
    """

    entity_provider = "joinquant"
    entity_schema = Index

    provider = "joinquant"
    data_schema = CrossMarketSummary

    def __init__(self, force_update=False, sleeping_time=5, real_time=False, fix_duplicate_way="add") -> None:
        codes = ["310001", "310002", "310003", "310004"]
        super().__init__(
            force_update,
            sleeping_time,
            ["cn"],
            None,
            codes=codes,
            day_data=True,
            real_time=real_time,
            fix_duplicate_way=fix_duplicate_way,
        )
        # note: the former init_entities override that only called super() was
        # removed — the inherited implementation is used directly.

    def record(self, entity, start, end, size, timestamps):
        """Query finance.STK_ML_QUOTA from *start* and map rows to schema dicts.

        Amounts are reported by joinquant in 亿元 (1e8 yuan) and converted to yuan.
        """
        df = run_query(table="finance.STK_ML_QUOTA", conditions=f"link_id#=#{entity.code}&day#>=#{to_time_str(start)}")
        # log through the recorder's logger instead of a bare print()
        self.logger.info(df)

        json_results = []
        for item in df.to_dict(orient="records"):
            json_results.append(
                {
                    "provider": self.provider,
                    "timestamp": item["day"],
                    "name": entity.name,
                    "buy_amount": multiple_number(item["buy_amount"], 100000000),
                    "buy_volume": item["buy_volume"],
                    "sell_amount": multiple_number(item["sell_amount"], 100000000),
                    "sell_volume": item["sell_volume"],
                    "quota_daily": multiple_number(item["quota_daily"], 100000000),
                    "quota_daily_balance": multiple_number(item["quota_daily_balance"], 100000000),
                }
            )

        # a short page means there is no more data; stop after this pass
        if len(json_results) < 100:
            self.one_shot = True

        return json_results

    def get_data_map(self):
        # rows are already mapped manually in record()
        return None


if __name__ == "__main__":
    CrossMarketSummaryRecorder().run()


# the __all__ is generated
__all__ = ["CrossMarketSummaryRecorder"]
from jqdatapy.api import run_query

from zvt.contract.recorder import TimeSeriesDataRecorder
from zvt.domain import Index
from zvt.domain import StockSummary
from zvt.utils.time_utils import to_time_str
from zvt.utils.utils import multiple_number

# joinquant market summary codes:
# 322001 上海市场 (SH market)
# 322002 上海A股 (SH A-shares)
# 322003 上海B股 (SH B-shares)
# 322004 深圳市场 (SZ market; the exchange publishes no volume / deal count)
# 322005 深市主板 (SZ main board)
# 322006 中小企业板 (SME board)
# 322007 创业板 (ChiNext)

code_map_jq = {"000001": "322002", "399106": "322004", "399001": "322005", "399005": "322006", "399006": "322007"}


class StockSummaryRecorder(TimeSeriesDataRecorder):
    """Records daily exchange-level trade summaries (pe, market cap, volume, turnover)."""

    entity_provider = "exchange"
    entity_schema = Index

    provider = "joinquant"
    data_schema = StockSummary

    def __init__(
        self,
        force_update=False,
        sleeping_time=5,
        exchanges=None,
        entity_id=None,
        entity_ids=None,
        day_data=False,
        entity_filters=None,
        ignore_failed=True,
        real_time=False,
        fix_duplicate_way="add",
        start_timestamp=None,
        end_timestamp=None,
    ) -> None:
        # SH A-shares, SZ market, SZ main board, SME board, ChiNext
        codes = ["000001", "399106", "399001", "399005", "399006"]
        super().__init__(
            force_update,
            sleeping_time,
            exchanges,
            entity_id,
            entity_ids,
            codes=codes,
            day_data=day_data,
            entity_filters=entity_filters,
            ignore_failed=ignore_failed,
            real_time=real_time,
            fix_duplicate_way=fix_duplicate_way,
            start_timestamp=start_timestamp,
            end_timestamp=end_timestamp,
        )

    def record(self, entity, start, end, size, timestamps):
        """Query finance.STK_EXCHANGE_TRADE_INFO from *start* for this market."""
        jq_code = code_map_jq.get(entity.code)

        df = run_query(
            table="finance.STK_EXCHANGE_TRADE_INFO",
            conditions=f"exchange_code#=#{jq_code}&date#>=#{to_time_str(start)}",
            parse_dates=["date"],
        )
        # log through the recorder's logger instead of a bare print()
        self.logger.info(df)

        json_results = []
        for item in df.to_dict(orient="records"):
            json_results.append(
                {
                    "provider": self.provider,
                    "timestamp": item["date"],
                    "name": entity.name,
                    "pe": item["pe_average"],
                    # caps reported in 亿元 (1e8 yuan), volume in 万 (1e4)
                    "total_value": multiple_number(item["total_market_cap"], 100000000),
                    # NOTE(review): "vaule" typo deliberately kept — the key
                    # must match the StockSummary schema column name; fix both
                    # together if ever renamed.
                    "total_tradable_vaule": multiple_number(item["circulating_market_cap"], 100000000),
                    "volume": multiple_number(item["volume"], 10000),
                    "turnover": multiple_number(item["money"], 100000000),
                    "turnover_rate": item["turnover_ratio"],
                }
            )

        # a short page means there is no more data; stop after this pass
        if len(json_results) < 100:
            self.one_shot = True

        return json_results

    def get_data_map(self):
        # rows are already mapped manually in record()
        return None


if __name__ == "__main__":
    StockSummaryRecorder().run()


# the __all__ is generated
__all__ = ["StockSummaryRecorder"]
# -*- coding: utf-8 -*-
import argparse

import pandas as pd
from jqdatapy.api import get_token, get_bars

from zvt import init_log, zvt_config
from zvt.api.kdata import generate_kdata_id, get_kdata_schema, get_kdata
from zvt.contract import IntervalLevel
from zvt.contract.api import df_to_db
from zvt.contract.recorder import FixedCycleDataRecorder
from zvt.domain import Index, IndexKdataCommon
from zvt.recorders.joinquant.common import to_jq_trading_level, to_jq_entity_id
from zvt.utils.pd_utils import pd_is_not_null
from zvt.utils.time_utils import to_time_str, TIME_FORMAT_DAY, TIME_FORMAT_ISO8601


class JqChinaIndexKdataRecorder(FixedCycleDataRecorder):
    """Records China index kdata (bars) from joinquant at a configurable level."""

    entity_provider = "joinquant"
    entity_schema = Index

    # data comes from joinquant
    provider = "joinquant"

    # only used to register this recorder to a schema; the effective schema is
    # resolved per level in __init__
    data_schema = IndexKdataCommon

    def __init__(
        self,
        force_update=True,
        sleeping_time=10,
        exchanges=None,
        entity_id=None,
        entity_ids=None,
        code=None,
        codes=None,
        day_data=False,
        entity_filters=None,
        ignore_failed=True,
        real_time=False,
        fix_duplicate_way="ignore",
        start_timestamp=None,
        end_timestamp=None,
        level=IntervalLevel.LEVEL_1DAY,
        kdata_use_begin_time=False,
        one_day_trading_minutes=24 * 60,
        return_unfinished=False,
    ) -> None:
        """Resolve the level-specific kdata schema, log in to joinquant, then
        forward the standard FixedCycleDataRecorder arguments unchanged.
        """
        level = IntervalLevel(level)
        self.data_schema = get_kdata_schema(entity_type="index", level=level)
        self.jq_trading_level = to_jq_trading_level(level)
        get_token(zvt_config["jq_username"], zvt_config["jq_password"], force=True)
        super().__init__(
            force_update,
            sleeping_time,
            exchanges,
            entity_id,
            entity_ids,
            code,
            codes,
            day_data,
            entity_filters,
            ignore_failed,
            real_time,
            fix_duplicate_way,
            start_timestamp,
            end_timestamp,
            level,
            kdata_use_begin_time,
            one_day_trading_minutes,
            return_unfinished,
        )

    def init_entities(self):
        super().init_entities()
        # ignore no data index (stock-connect link pseudo-codes have no kdata)
        self.entities = [
            entity for entity in self.entities if entity.code not in ["310001", "310002", "310003", "310004"]
        ]

    def generate_domain_id(self, entity, original_data):
        # one record per entity per bar interval
        return generate_kdata_id(entity_id=entity.id, timestamp=original_data["timestamp"], level=self.level)

    def record(self, entity, start, end, size, timestamps):
        """Fetch up to *size* bars for one index and persist them."""
        if not self.end_timestamp:
            df = get_bars(
                to_jq_entity_id(entity),
                count=size,
                unit=self.jq_trading_level,
                # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money']
            )
        else:
            end_timestamp = to_time_str(self.end_timestamp)
            df = get_bars(
                to_jq_entity_id(entity),
                count=size,
                unit=self.jq_trading_level,
                # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'],
                end_date=end_timestamp,
            )
        if pd_is_not_null(df):
            df["name"] = entity.name
            df.rename(columns={"money": "turnover", "date": "timestamp"}, inplace=True)

            df["entity_id"] = entity.id
            df["timestamp"] = pd.to_datetime(df["timestamp"])
            df["provider"] = "joinquant"
            df["level"] = self.level.value
            df["code"] = entity.code

            def generate_kdata_id(se):
                # daily-and-coarser ids carry only the date; intraday keep time
                if self.level >= IntervalLevel.LEVEL_1DAY:
                    return "{}_{}".format(se["entity_id"], to_time_str(se["timestamp"], fmt=TIME_FORMAT_DAY))
                else:
                    return "{}_{}".format(se["entity_id"], to_time_str(se["timestamp"], fmt=TIME_FORMAT_ISO8601))

            df["id"] = df[["entity_id", "timestamp"]].apply(generate_kdata_id, axis=1)

            df = df.drop_duplicates(subset="id", keep="last")

            df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update)

        return None


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--level", help="trading level", default="1d", choices=[item.value for item in IntervalLevel])
    parser.add_argument("--codes", help="codes", default=["000001"], nargs="+")

    args = parser.parse_args()

    level = IntervalLevel(args.level)
    codes = args.codes

    init_log("jq_china_stock_{}_kdata.log".format(args.level))
    JqChinaIndexKdataRecorder(level=level, sleeping_time=0, codes=codes, real_time=False).run()

    print(get_kdata(entity_id="index_sh_000001", limit=10))


# the __all__ is generated
__all__ = ["JqChinaIndexKdataRecorder"]
"joinquant" + entity_schema = Stock + + # 数据来自jq + provider = "joinquant" + + # 只是为了把recorder注册到data_schema + data_schema = StockKdataCommon + + def __init__( + self, + force_update=True, + sleeping_time=10, + exchanges=None, + entity_id=None, + entity_ids=None, + code=None, + codes=None, + day_data=False, + entity_filters=None, + ignore_failed=True, + real_time=False, + fix_duplicate_way="ignore", + start_timestamp=None, + end_timestamp=None, + level=IntervalLevel.LEVEL_1DAY, + kdata_use_begin_time=False, + one_day_trading_minutes=24 * 60, + adjust_type=AdjustType.qfq, + return_unfinished=False, + ) -> None: + level = IntervalLevel(level) + adjust_type = AdjustType(adjust_type) + self.data_schema = get_kdata_schema(entity_type="stock", level=level, adjust_type=adjust_type) + self.jq_trading_level = to_jq_trading_level(level) + + super().__init__( + force_update, + sleeping_time, + exchanges, + entity_id, + entity_ids, + code, + codes, + day_data, + entity_filters, + ignore_failed, + real_time, + fix_duplicate_way, + start_timestamp, + end_timestamp, + level, + kdata_use_begin_time, + one_day_trading_minutes, + return_unfinished, + ) + + self.adjust_type = adjust_type + + get_token(zvt_config["jq_username"], zvt_config["jq_password"], force=True) + + def init_entities(self): + super().init_entities() + # 过滤掉退市的 + self.entities = [ + entity for entity in self.entities if (entity.end_date is None) or (entity.end_date > now_pd_timestamp()) + ] + + def generate_domain_id(self, entity, original_data): + return generate_kdata_id(entity_id=entity.id, timestamp=original_data["timestamp"], level=self.level) + + def recompute_qfq(self, entity, qfq_factor, last_timestamp): + # 重新计算前复权数据 + if qfq_factor != 0: + kdatas = get_kdata( + provider=self.provider, + entity_id=entity.id, + level=self.level.value, + order=self.data_schema.timestamp.asc(), + return_type="domain", + session=self.session, + filters=[self.data_schema.timestamp < last_timestamp], + ) + if kdatas: + 
self.logger.info("recomputing {} qfq kdata,factor is:{}".format(entity.code, qfq_factor)) + for kdata in kdatas: + kdata.open = round(kdata.open * qfq_factor, 2) + kdata.close = round(kdata.close * qfq_factor, 2) + kdata.high = round(kdata.high * qfq_factor, 2) + kdata.low = round(kdata.low * qfq_factor, 2) + self.session.add_all(kdatas) + self.session.commit() + + def record(self, entity, start, end, size, timestamps): + if self.adjust_type == AdjustType.hfq: + fq_ref_date = "2000-01-01" + else: + fq_ref_date = to_time_str(now_pd_timestamp()) + + if not self.end_timestamp: + df = get_bars( + to_jq_entity_id(entity), + count=size, + unit=self.jq_trading_level, + # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'], + fq_ref_date=fq_ref_date, + ) + else: + end_timestamp = to_time_str(self.end_timestamp) + df = get_bars( + to_jq_entity_id(entity), + count=size, + unit=self.jq_trading_level, + # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'], + end_date=end_timestamp, + fq_ref_date=fq_ref_date, + ) + if pd_is_not_null(df): + df["name"] = entity.name + df.rename(columns={"money": "turnover", "date": "timestamp"}, inplace=True) + + df["entity_id"] = entity.id + df["timestamp"] = pd.to_datetime(df["timestamp"]) + df["provider"] = "joinquant" + df["level"] = self.level.value + df["code"] = entity.code + + # 判断是否需要重新计算之前保存的前复权数据 + if self.adjust_type == AdjustType.qfq: + check_df = df.head(1) + check_date = check_df["timestamp"][0] + current_df = get_kdata( + entity_id=entity.id, + provider=self.provider, + start_timestamp=check_date, + end_timestamp=check_date, + limit=1, + level=self.level, + adjust_type=self.adjust_type, + ) + if pd_is_not_null(current_df): + old = current_df.iloc[0, :]["close"] + new = check_df["close"][0] + # 相同时间的close不同,表明前复权需要重新计算 + if round(old, 2) != round(new, 2): + qfq_factor = new / old + last_timestamp = pd.Timestamp(check_date) + self.recompute_qfq(entity, qfq_factor=qfq_factor, 
last_timestamp=last_timestamp) + + def generate_kdata_id(se): + if self.level >= IntervalLevel.LEVEL_1DAY: + return "{}_{}".format(se["entity_id"], to_time_str(se["timestamp"], fmt=TIME_FORMAT_DAY)) + else: + return "{}_{}".format(se["entity_id"], to_time_str(se["timestamp"], fmt=TIME_FORMAT_ISO8601)) + + df["id"] = df[["entity_id", "timestamp"]].apply(generate_kdata_id, axis=1) + + df = df.drop_duplicates(subset="id", keep="last") + + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + return None + + +if __name__ == "__main__": + Stock1wkHfqKdata.record_data(codes=["300999"]) + + +# the __all__ is generated +__all__ = ["JqChinaStockKdataRecorder"] diff --git a/src/zvt/recorders/jqka/__init__.py b/src/zvt/recorders/jqka/__init__.py new file mode 100644 index 00000000..ae5addc1 --- /dev/null +++ b/src/zvt/recorders/jqka/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule jqka_api +from .jqka_api import * +from .jqka_api import __all__ as _jqka_api_all + +__all__ += _jqka_api_all + +# import all from submodule emotion +from .emotion import * +from .emotion import __all__ as _emotion_all + +__all__ += _emotion_all diff --git a/src/zvt/recorders/jqka/emotion/JqkaEmotionRecorder.py b/src/zvt/recorders/jqka/emotion/JqkaEmotionRecorder.py new file mode 100644 index 00000000..ac4fc955 --- /dev/null +++ b/src/zvt/recorders/jqka/emotion/JqkaEmotionRecorder.py @@ -0,0 +1,213 @@ +# -*- coding: utf-8 -*- +import re +from typing import List + +import pandas as pd + +from zvt.api.utils import china_stock_code_to_id +from zvt.contract.api import df_to_db +from zvt.contract.recorder import TimestampsDataRecorder +from zvt.domain import Stock +from zvt.domain.emotion.emotion import LimitUpInfo, LimitDownInfo, Emotion +from 
zvt.recorders.jqka import jqka_api +from zvt.utils.time_utils import to_time_str, date_time_by_interval, current_date, to_pd_timestamp + + +def _get_high_days_count(high_days_str: str): + if not high_days_str or (high_days_str == "首板"): + return 1 + pattern = r"\d+" + result = re.findall(pattern, high_days_str) + return int(result[-1]) + + +class JqkaLimitUpRecorder(TimestampsDataRecorder): + entity_provider = "em" + entity_schema = Stock + + provider = "jqka" + data_schema = LimitUpInfo + + def init_entities(self): + # fake entity to for trigger run + self.entities = [Stock(id="stock_sz_000001")] + + def init_timestamps(self, entity_item) -> List[pd.Timestamp]: + latest_infos = LimitUpInfo.query_data( + provider=self.provider, order=LimitUpInfo.timestamp.desc(), limit=1, return_type="domain" + ) + if latest_infos and not self.force_update: + start_date = latest_infos[0].timestamp + else: + # 最近一年的数据 + start_date = date_time_by_interval(current_date(), -360) + return pd.date_range(start=start_date, end=pd.Timestamp.now(), freq="B").tolist() + + def record(self, entity, start, end, size, timestamps): + for timestamp in timestamps: + the_date = to_time_str(timestamp) + self.logger.info(f"record {self.data_schema} to {the_date}") + limit_ups = jqka_api.get_limit_up(date=the_date) + if limit_ups: + records = [] + for data in limit_ups: + entity_id = china_stock_code_to_id(code=data["code"]) + record = { + "id": "{}_{}".format(entity_id, the_date), + "entity_id": entity_id, + "timestamp": to_pd_timestamp(the_date), + "code": data["code"], + "name": data["name"], + "is_new": data["is_new"], + "is_again_limit": data["is_again_limit"], + "open_count": data["open_num"] if data["open_num"] else 0, + "first_limit_up_time": pd.Timestamp.fromtimestamp(int(data["first_limit_up_time"])), + "last_limit_up_time": pd.Timestamp.fromtimestamp(int(data["last_limit_up_time"])), + "limit_up_type": data["limit_up_type"], + "order_amount": data["order_amount"], + "success_rate": 
data["limit_up_suc_rate"], + "currency_value": data["currency_value"], + "change_pct": data["change_rate"] / 100, + "turnover_rate": data["turnover_rate"] / 100, + "reason": data["reason_type"], + "high_days": data["high_days"], + "high_days_count": _get_high_days_count(data["high_days"]), + } + records.append(record) + df = pd.DataFrame.from_records(records) + df_to_db( + data_schema=self.data_schema, + df=df, + provider=self.provider, + force_update=True, + drop_duplicates=True, + ) + + +class JqkaLimitDownRecorder(TimestampsDataRecorder): + entity_provider = "em" + entity_schema = Stock + + provider = "jqka" + data_schema = LimitDownInfo + + def init_entities(self): + # fake entity to for trigger run + self.entities = [Stock(id="stock_sz_000001")] + + def init_timestamps(self, entity_item) -> List[pd.Timestamp]: + latest_infos = LimitDownInfo.query_data( + provider=self.provider, order=LimitDownInfo.timestamp.desc(), limit=1, return_type="domain" + ) + if latest_infos and not self.force_update: + start_date = latest_infos[0].timestamp + else: + # 最近一年的数据 + start_date = date_time_by_interval(current_date(), -360) + return pd.date_range(start=start_date, end=pd.Timestamp.now(), freq="B").tolist() + + def record(self, entity, start, end, size, timestamps): + for timestamp in timestamps: + the_date = to_time_str(timestamp) + self.logger.info(f"record {self.data_schema} to {the_date}") + limit_downs = jqka_api.get_limit_down(date=the_date) + if limit_downs: + records = [] + for idx, data in enumerate(limit_downs): + entity_id = china_stock_code_to_id(code=data["code"]) + record = { + "id": "{}_{}".format(entity_id, the_date), + "entity_id": entity_id, + "timestamp": to_pd_timestamp(the_date), + "code": data["code"], + "name": data["name"], + "is_new": data["is_new"], + "is_again_limit": data["is_again_limit"], + "currency_value": data["currency_value"], + "change_pct": data["change_rate"] / 100, + "turnover_rate": data["turnover_rate"] / 100, + } + 
records.append(record) + df = pd.DataFrame.from_records(records) + df_to_db( + data_schema=self.data_schema, + df=df, + provider=self.provider, + force_update=True, + drop_duplicates=True, + ) + + +def _cal_power_and_max_height(continuous_limit_up: dict): + max_height = 0 + power = 0 + for item in continuous_limit_up: + if max_height < item["height"]: + max_height = item["height"] + power = power + item["height"] * item["number"] + return max_height, power + + +class JqkaEmotionRecorder(TimestampsDataRecorder): + entity_provider = "em" + entity_schema = Stock + + provider = "jqka" + data_schema = Emotion + + def init_entities(self): + # fake entity to for trigger run + self.entities = [Stock(id="stock_sz_000001")] + + def init_timestamps(self, entity_item) -> List[pd.Timestamp]: + latest_infos = Emotion.query_data( + provider=self.provider, order=Emotion.timestamp.desc(), limit=1, return_type="domain" + ) + if latest_infos and not self.force_update: + start_date = latest_infos[0].timestamp + else: + # 最近一年的数据 + start_date = date_time_by_interval(current_date(), -365) + return pd.date_range(start=start_date, end=pd.Timestamp.now(), freq="B").tolist() + + def record(self, entity, start, end, size, timestamps): + for timestamp in timestamps: + the_date = to_time_str(timestamp) + self.logger.info(f"record {self.data_schema} to {the_date}") + limit_stats = jqka_api.get_limit_stats(date=the_date) + continuous_limit_up = jqka_api.get_continuous_limit_up(date=the_date) + max_height, continuous_power = _cal_power_and_max_height(continuous_limit_up=continuous_limit_up) + + if limit_stats: + # 大盘 + entity_id = "stock_sh_000001" + record = { + "id": "{}_{}".format(entity_id, the_date), + "entity_id": entity_id, + "timestamp": to_pd_timestamp(the_date), + "limit_up_count": limit_stats["limit_up_count"]["today"]["num"], + "limit_up_open_count": limit_stats["limit_up_count"]["today"]["open_num"], + "limit_up_success_rate": limit_stats["limit_up_count"]["today"]["rate"], + 
"limit_down_count": limit_stats["limit_down_count"]["today"]["num"], + "limit_down_open_count": limit_stats["limit_down_count"]["today"]["open_num"], + "limit_down_success_rate": limit_stats["limit_down_count"]["today"]["rate"], + "max_height": max_height, + "continuous_power": continuous_power, + } + df = pd.DataFrame.from_records([record]) + df_to_db( + data_schema=self.data_schema, + df=df, + provider=self.provider, + force_update=True, + drop_duplicates=True, + ) + + +if __name__ == "__main__": + # JqkaLimitDownRecorder().run() + LimitDownInfo.record_data(start_timestamp="2024-02-02", end_timestamp="2024-02-16", force_update=True) + + +# the __all__ is generated +__all__ = ["JqkaLimitUpRecorder", "JqkaLimitDownRecorder", "JqkaEmotionRecorder"] diff --git a/src/zvt/recorders/jqka/emotion/__init__.py b/src/zvt/recorders/jqka/emotion/__init__.py new file mode 100644 index 00000000..f23c1747 --- /dev/null +++ b/src/zvt/recorders/jqka/emotion/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule JqkaEmotionRecorder +from .JqkaEmotionRecorder import * +from .JqkaEmotionRecorder import __all__ as _JqkaEmotionRecorder_all + +__all__ += _JqkaEmotionRecorder_all diff --git a/src/zvt/recorders/jqka/jqka_api.py b/src/zvt/recorders/jqka/jqka_api.py new file mode 100644 index 00000000..295b561c --- /dev/null +++ b/src/zvt/recorders/jqka/jqka_api.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- + +import requests + +from zvt.utils.time_utils import now_timestamp, to_time_str, TIME_FORMAT_DAY1 +from zvt.utils.utils import chrome_copy_header_to_dict + +_JKQA_HEADER = chrome_copy_header_to_dict( + """ +Accept: application/json, text/plain, */* +Accept-Encoding: gzip, deflate, br +Accept-Language: zh-CN,zh;q=0.9,en;q=0.8 +Connection: keep-alive +Host: data.10jqka.com.cn 
def get_limit_stats(date: str):
    """Query 10jqka's limit-up pool endpoint for the day's market-wide counts.

    Returns a dict with the "limit_up_count" and "limit_down_count" sub-dicts
    of the response; raises RuntimeError on a non-200 response or an empty
    payload.
    """
    date_str = to_time_str(the_time=date, fmt=TIME_FORMAT_DAY1)
    url = f"https://data.10jqka.com.cn/dataapi/limit_up/limit_up_pool?page=1&limit=1&field=199112,10,9001,330323,330324,330325,9002,330329,133971,133970,1968584,3475914,9003,9004&filter=HS,GEM2STAR&date={date_str}&order_field=330324&order_type=0&_={now_timestamp()}"
    response = requests.get(url, headers=_JKQA_HEADER)
    if response.status_code == 200:
        payload = response.json()
        if payload:
            counts = payload["data"]
            return {
                "limit_up_count": counts["limit_up_count"],
                "limit_down_count": counts["limit_down_count"],
            }
    raise RuntimeError(f"request jkqa data code: {response.status_code}, error: {response.text}")
def get_limit_down(date: str):
    """Fetch the limit-down pool for *date* from 10jqka (all pages)."""
    date_str = to_time_str(the_time=date, fmt=TIME_FORMAT_DAY1)
    url = f"https://data.10jqka.com.cn/dataapi/limit_up/lower_limit_pool?field=199112,10,9001,330323,330324,330325,9002,330329,133971,133970,1968584,3475914,9003,9004&filter=HS,GEM2STAR&order_field=199112&order_type=0&date={date_str}"
    return get_jkqa_data(url=url)


def get_jkqa_data(url, pn=1, ps=200, fetch_all=True, headers=_JKQA_HEADER):
    """Fetch (optionally all pages of) a paginated 10jqka endpoint.

    :param url: base url; page/limit/timestamp query params are appended here.
    :param pn: first page number to request.
    :param ps: page size.
    :param fetch_all: when True, follow pagination to the last page.
    :param headers: request headers (defaults to the shared browser header set).
    :return: the concatenated ``info`` lists, or None when the response carries
        no ``data`` section.
    :raises RuntimeError: on a non-200 response, or when the fetched row count
        does not match the server-reported total (only checked when the fetch
        started at page 1, where a full count is meaningful).

    Previously implemented by recursing once per page, which risks hitting the
    recursion limit on large result sets; now iterative with identical
    signature, messages and return values.
    """
    data = None
    total = None
    page = pn
    while True:
        requesting_url = url + f"&page={page}&limit={ps}&_={now_timestamp()}"
        print(requesting_url)
        resp = requests.get(requesting_url, headers=headers)
        if resp.status_code != 200:
            raise RuntimeError(f"request jkqa data code: {resp.status_code}, error: {resp.text}")
        json_result = resp.json()
        if not json_result or not json_result.get("data"):
            # no payload: keep whatever earlier pages yielded (None on page 1)
            return data
        page_data: list = json_result["data"]["info"]
        data = page_data if data is None else data + page_data
        total = json_result["data"]["page"]["total"]
        if not fetch_all or page >= json_result["data"]["page"]["count"]:
            break
        page += 1
    if fetch_all and pn == 1 and len(data) != total:
        raise RuntimeError(f"Assertion failed, the total length of data should be {total}, only {len(data)} fetched")
    return data


if __name__ == "__main__":
    # result = get_limit_up(date="20210716")
    # print(result)
    # result = get_limit_stats(date="20210716")
    # print(result)
    # result = get_limit_down(date="20210716")
    # print(result)
    result = get_continuous_limit_up(date="20210716")
    print(result)


# the __all__ is generated
__all__ = ["get_continuous_limit_up", "get_limit_stats", "get_limit_up", "get_limit_down", "get_jkqa_data"]
"get_jkqa_data"] diff --git a/src/zvt/recorders/qmt/__init__.py b/src/zvt/recorders/qmt/__init__.py new file mode 100644 index 00000000..26a1b94f --- /dev/null +++ b/src/zvt/recorders/qmt/__init__.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*-# + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule quotes +from .quotes import * +from .quotes import __all__ as _quotes_all + +__all__ += _quotes_all + +# import all from submodule index +from .index import * +from .index import __all__ as _index_all + +__all__ += _index_all + +# import all from submodule meta +from .meta import * +from .meta import __all__ as _meta_all + +__all__ += _meta_all diff --git a/zvt/factors/ma/domain/__init__.py b/src/zvt/recorders/qmt/index/__init__.py similarity index 51% rename from zvt/factors/ma/domain/__init__.py rename to src/zvt/recorders/qmt/index/__init__.py index 39241866..ca4cb75c 100644 --- a/zvt/factors/ma/domain/__init__.py +++ b/src/zvt/recorders/qmt/index/__init__.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- + + # the __all__ is generated __all__ = [] @@ -6,7 +8,8 @@ # common code of the package # export interface in __all__ which contains __all__ of its sub modules -# import all from submodule stock_1d_ma_factor -from .stock_1d_ma_factor import * -from .stock_1d_ma_factor import __all__ as _stock_1d_ma_factor_all -__all__ += _stock_1d_ma_factor_all \ No newline at end of file +# import all from submodule qmt_index_recorder +from .qmt_index_recorder import * +from .qmt_index_recorder import __all__ as _qmt_index_recorder_all + +__all__ += _qmt_index_recorder_all diff --git a/src/zvt/recorders/qmt/index/qmt_index_recorder.py b/src/zvt/recorders/qmt/index/qmt_index_recorder.py new file mode 100644 index 00000000..0d7480dd --- /dev/null +++ b/src/zvt/recorders/qmt/index/qmt_index_recorder.py @@ -0,0 +1,189 @@ +# -*- coding: utf-8 
# -*- coding: utf-8 -*-
import pandas as pd

from zvt.api.kdata import get_kdata_schema
from zvt.broker.qmt import qmt_quote
from zvt.consts import IMPORTANT_INDEX
from zvt.contract import IntervalLevel
from zvt.contract.api import df_to_db
from zvt.contract.recorder import FixedCycleDataRecorder
from zvt.contract.utils import evaluate_size_from_timestamp
from zvt.domain import Index, IndexKdataCommon
from zvt.utils.pd_utils import pd_is_not_null
from zvt.utils.time_utils import TIME_FORMAT_DAY, TIME_FORMAT_MINUTE, current_date, to_time_str


class QmtIndexRecorder(FixedCycleDataRecorder):
    """Record index kdata (daily or minute bars) from the QMT quote service.

    The concrete storage schema is resolved per instance in ``__init__`` from
    the requested level; the class-level ``data_schema`` stays the common one.
    """

    provider = "qmt"
    # class level kdata schema should always use common
    data_schema = IndexKdataCommon
    entity_provider = "em"
    entity_schema = Index
    download_history_data = False

    def __init__(
        self,
        force_update=True,
        sleeping_time=10,
        exchanges=None,
        entity_id=None,
        entity_ids=None,
        code=None,
        codes=None,
        day_data=False,
        entity_filters=None,
        ignore_failed=True,
        real_time=False,
        fix_duplicate_way="ignore",
        start_timestamp=None,
        end_timestamp=None,
        level=IntervalLevel.LEVEL_1DAY,
        kdata_use_begin_time=False,
        one_day_trading_minutes=24 * 60,
        return_unfinished=False,
        download_history_data=False,
    ) -> None:
        level = IntervalLevel(level)
        self.entity_type = "index"
        self.download_history_data = download_history_data

        # resolve the level-specific kdata schema (index kdata has no adjust type)
        self.data_schema = get_kdata_schema(entity_type=self.entity_type, level=level, adjust_type=None)

        super().__init__(
            force_update,
            sleeping_time,
            exchanges,
            entity_id,
            entity_ids,
            code,
            codes,
            day_data,
            entity_filters,
            ignore_failed,
            real_time,
            fix_duplicate_way,
            start_timestamp,
            end_timestamp,
            level,
            kdata_use_begin_time,
            one_day_trading_minutes,
            return_unfinished,
        )
        # NOTE(review): hard-coded to the A-share trading day (240 minutes);
        # the one_day_trading_minutes constructor argument is effectively ignored.
        self.one_day_trading_minutes = 240

    def record(self, entity, start, end, size, timestamps):
        """Fetch kdata for *entity* between start and end and persist it."""
        if start and (self.level == IntervalLevel.LEVEL_1DAY):
            start = start.date()
        if not start:
            start = "2005-01-01"
        if not end:
            end = current_date()

        # Align with high-frequency data conventions and reduce update rounds:
        # minute bars read one extra bar so that a span like
        # start_timestamp=9:30, end_timestamp=15:00 is fully covered.
        if self.level == IntervalLevel.LEVEL_1MIN:
            end += pd.Timedelta(seconds=1)

        df = qmt_quote.get_kdata(
            entity_id=entity.id,
            start_timestamp=start,
            end_timestamp=end,
            adjust_type=None,
            level=self.level,
            download_history=self.download_history_data,
        )
        # row ids embed the timestamp at day or minute precision depending on level
        time_str_fmt = TIME_FORMAT_DAY if self.level == IntervalLevel.LEVEL_1DAY else TIME_FORMAT_MINUTE
        if pd_is_not_null(df):
            df["entity_id"] = entity.id
            df["timestamp"] = pd.to_datetime(df.index)
            df["id"] = df.apply(
                lambda row: f"{row['entity_id']}_{to_time_str(row['timestamp'], fmt=time_str_fmt)}", axis=1
            )
            df["provider"] = "qmt"
            df["level"] = self.level.value
            df["code"] = entity.code
            df["name"] = entity.name
            # QMT reports trade value as "amount"; the schema calls it "turnover"
            df.rename(columns={"amount": "turnover"}, inplace=True)
            # assumes the qmt frame carries a preClose column — TODO confirm
            df["change_pct"] = (df["close"] - df["preClose"]) / df["preClose"]
            df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update)

        else:
            self.logger.info(f"no kdata for {entity.id}")

    def evaluate_start_end_size_timestamps(self, entity):
        """Decide the fetch window for *entity*, forcing a refetch when the
        recorded history between start and end looks incomplete."""
        if self.download_history_data and self.start_timestamp and self.end_timestamp:
            # Historical data may be fragmented; check whether the span between
            # start and end is actually fully populated before trusting it.
            expected_size = evaluate_size_from_timestamp(
                start_timestamp=self.start_timestamp,
                end_timestamp=self.end_timestamp,
                level=self.level,
                one_day_trading_minutes=self.one_day_trading_minutes,
            )

            recorded_size = (
                self.session.query(self.data_schema)
                .filter(
                    self.data_schema.entity_id == entity.id,
                    self.data_schema.timestamp >= self.start_timestamp,
                    self.data_schema.timestamp <= self.end_timestamp,
                )
                .count()
            )

            if expected_size != recorded_size:
                # print(f"expected_size: {expected_size}, recorded_size: {recorded_size}")
                return self.start_timestamp, self.end_timestamp, self.default_size, None

        start_timestamp, end_timestamp, size, timestamps = super().evaluate_start_end_size_timestamps(entity)
        # start_timestamp is the last updated timestamp
        if self.end_timestamp is not None:
            if start_timestamp >= self.end_timestamp:
                # already up to date for the requested window
                return start_timestamp, end_timestamp, 0, None
            else:
                size = evaluate_size_from_timestamp(
                    start_timestamp=start_timestamp,
                    level=self.level,
                    one_day_trading_minutes=self.one_day_trading_minutes,
                    end_timestamp=self.end_timestamp,
                )
                return start_timestamp, self.end_timestamp, size, timestamps

        return start_timestamp, end_timestamp, size, timestamps

    # # CSI, Shanghai
    # def record_cs_index(self, index_type):
    #     df = cs_index_api.get_cs_index(index_type=index_type)
    #     df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, force_update=True)
    #     self.logger.info(f"finish record {index_type} index")
    #
    # # CNI, Shenzhen
    # def record_cn_index(self, index_type):
    #     if index_type == "cni":
    #         category_map_url = cn_index_api.cni_category_map_url
    #     elif index_type == "sz":
    #         category_map_url = cn_index_api.sz_category_map_url
    #     else:
    #         self.logger.error(f"not support index_type: {index_type}")
    #         assert False
    #
    #     for category, _ in category_map_url.items():
    #         df = cn_index_api.get_cn_index(index_type=index_type, category=category)
    #         df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, force_update=True)
    #         self.logger.info(f"finish record {index_type} index:{category.value}")


if __name__ == "__main__":
    # init_log('china_stock_category.log')
    start_timestamp = pd.Timestamp("2024-12-01")
    end_timestamp = pd.Timestamp("2024-12-03")
    QmtIndexRecorder(
        codes=IMPORTANT_INDEX,
        level=IntervalLevel.LEVEL_1MIN,
        sleeping_time=0,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        download_history_data=True,
    ).run()


# the __all__ is generated
__all__ = ["QmtIndexRecorder"]
class QMTStockRecorder(Recorder):
    """Record the QMT stock universe into the stock meta table."""

    provider = "qmt"
    data_schema = Stock

    def run(self):
        # Pull the full entity list from the QMT quote service.
        entity_df = qmt_quote.get_entity_list()
        self.logger.info(entity_df.tail())
        # Persist, overwriting any rows that already exist.
        df_to_db(
            df=entity_df,
            data_schema=self.data_schema,
            provider=self.provider,
            force_update=True,
        )


if __name__ == "__main__":
    recorder = QMTStockRecorder()
    recorder.run()


# the __all__ is generated
__all__ = ["QMTStockRecorder"]
# -*- coding: utf-8 -*-
import pandas as pd

from zvt.api.kdata import get_kdata_schema, get_kdata
from zvt.broker.qmt import qmt_quote
from zvt.contract import IntervalLevel, AdjustType
from zvt.contract.api import df_to_db, get_db_session, get_entities
from zvt.contract.recorder import FixedCycleDataRecorder
from zvt.domain import (
    Stock,
    StockKdataCommon,
)
from zvt.utils.pd_utils import pd_is_not_null
from zvt.utils.time_utils import current_date, to_time_str, now_time_str


class BaseQmtKdataRecorder(FixedCycleDataRecorder):
    """Base recorder for stock kdata sourced from the QMT quote service.

    Subclasses only need to bind ``entity_schema``/``data_schema``; the
    level- and adjust-specific storage schema is resolved in ``__init__``.
    """

    default_size = 50000
    entity_provider: str = "qmt"

    provider = "qmt"

    def __init__(
        self,
        force_update=True,
        sleeping_time=10,
        exchanges=None,
        entity_id=None,
        entity_ids=None,
        code=None,
        codes=None,
        day_data=False,
        entity_filters=None,
        ignore_failed=True,
        real_time=False,
        fix_duplicate_way="ignore",
        start_timestamp=None,
        end_timestamp=None,
        level=IntervalLevel.LEVEL_1DAY,
        kdata_use_begin_time=False,
        one_day_trading_minutes=24 * 60,
        adjust_type=AdjustType.qfq,
        return_unfinished=False,
    ) -> None:
        level = IntervalLevel(level)
        self.adjust_type = AdjustType(adjust_type)
        # e.g. Stock -> "stock"; drives schema resolution below
        self.entity_type = self.entity_schema.__name__.lower()

        self.data_schema = get_kdata_schema(entity_type=self.entity_type, level=level, adjust_type=self.adjust_type)

        super().__init__(
            force_update,
            sleeping_time,
            exchanges,
            entity_id,
            entity_ids,
            code,
            codes,
            day_data,
            entity_filters,
            ignore_failed,
            real_time,
            fix_duplicate_way,
            start_timestamp,
            end_timestamp,
            level,
            kdata_use_begin_time,
            one_day_trading_minutes,
            return_unfinished,
        )

    def init_entities(self):
        """
        init the entities which we would record data for

        When ``day_data`` is set, entities that already have a row at today's
        timestamp are excluded via an extra ``notin_`` filter.
        """
        if self.entity_provider == self.provider and self.entity_schema == self.data_schema:
            self.entity_session = self.session
        else:
            self.entity_session = get_db_session(provider=self.entity_provider, data_schema=self.entity_schema)

        if self.day_data:
            df = self.data_schema.query_data(
                start_timestamp=now_time_str(), columns=["entity_id", "timestamp"], provider=self.provider
            )
            if pd_is_not_null(df):
                entity_ids = df["entity_id"].tolist()
                self.logger.info(f"ignore entity_ids:{entity_ids}")
                if self.entity_filters:
                    self.entity_filters.append(self.entity_schema.entity_id.notin_(entity_ids))
                else:
                    self.entity_filters = [self.entity_schema.entity_id.notin_(entity_ids)]

        #: init the entity list
        self.entities = get_entities(
            session=self.entity_session,
            entity_schema=self.entity_schema,
            exchanges=self.exchanges,
            entity_ids=self.entity_ids,
            codes=self.codes,
            return_type="domain",
            provider=self.entity_provider,
            filters=self.entity_filters,
        )

    def record(self, entity, start, end, size, timestamps):
        """Fetch kdata for *entity* and persist it, rebuilding the whole series
        when previously stored forward-adjusted (qfq) prices have drifted."""
        if start and (self.level == IntervalLevel.LEVEL_1DAY):
            start = start.date()

        # Check whether previously saved qfq (forward-adjusted) data must be
        # recalculated: compare the stored close at `start` against a fresh one.
        if start and (self.adjust_type == AdjustType.qfq):
            check_df = qmt_quote.get_kdata(
                entity_id=entity.id,
                start_timestamp=start,
                end_timestamp=start,
                adjust_type=self.adjust_type,
                level=self.level,
                download_history=False,
            )
            if pd_is_not_null(check_df):
                current_df = get_kdata(
                    entity_id=entity.id,
                    provider=self.provider,
                    start_timestamp=start,
                    end_timestamp=start,
                    limit=1,
                    level=self.level,
                    adjust_type=self.adjust_type,
                )
                if pd_is_not_null(current_df):
                    old = current_df.iloc[0, :]["close"]
                    new = check_df["close"][0]
                    # A different close at the same timestamp means the qfq
                    # series changed (new ex-rights event) and must be rebuilt.
                    if round(old, 2) != round(new, 2):
                        # drop everything for this entity and refetch from scratch
                        self.session.query(self.data_schema).filter(self.data_schema.entity_id == entity.id).delete()
                        start = "2005-01-01"

        if not start:
            start = "2005-01-01"
        if not end:
            end = current_date()

        df = qmt_quote.get_kdata(
            entity_id=entity.id,
            start_timestamp=start,
            end_timestamp=end,
            adjust_type=self.adjust_type,
            level=self.level,
            download_history=False,
        )
        if pd_is_not_null(df):
            df["entity_id"] = entity.id
            df["timestamp"] = pd.to_datetime(df.index)
            df["id"] = df.apply(lambda row: f"{row['entity_id']}_{to_time_str(row['timestamp'])}", axis=1)
            df["provider"] = "qmt"
            df["level"] = self.level.value
            df["code"] = entity.code
            df["name"] = entity.name
            # QMT reports trade value as "amount"; the schema calls it "turnover"
            df.rename(columns={"amount": "turnover"}, inplace=True)
            # assumes the qmt frame carries a preClose column — TODO confirm
            df["change_pct"] = (df["close"] - df["preClose"]) / df["preClose"]
            df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update)

        else:
            self.logger.info(f"no kdata for {entity.id}")


class QMTStockKdataRecorder(BaseQmtKdataRecorder):
    """Concrete QMT kdata recorder for stocks."""

    entity_schema = Stock
    data_schema = StockKdataCommon


if __name__ == "__main__":
    # Stock.record_data(provider="qmt")
    QMTStockKdataRecorder(entity_id="stock_sz_301611", adjust_type=AdjustType.qfq).run()


# the __all__ is generated
__all__ = ["BaseQmtKdataRecorder", "QMTStockKdataRecorder"]
class SinaBlockRecorder(Recorder):
    """Record Sina stock block (industry/concept) metadata as Block rows."""

    provider = "sina"
    data_schema = Block

    # Listing pages for industry / concept blocks.
    category_map_url = {
        BlockCategory.industry: "http://vip.stock.finance.sina.com.cn/q/view/newSinaHy.php",
        BlockCategory.concept: "http://money.finance.sina.com.cn/q/view/newFLJK.php?param=class"
        # StockCategory.area: 'http://money.finance.sina.com.cn/q/view/newFLJK.php?param=area',
    }

    def run(self):
        # get stock blocks from sina
        for category, url in self.category_map_url.items():
            resp = requests.get(url)
            resp.encoding = "GBK"

            tmp_str = resp.text
            # The page is a JS assignment; slice out the single object literal.
            json_str = tmp_str[tmp_str.index("{") : tmp_str.index("}") + 1]
            tmp_json = json.loads(json_str)

            the_list = []

            for code in tmp_json:
                # value is a comma-separated record; field 1 is the block name
                name = tmp_json[code].split(",")[1]
                entity_id = f"block_cn_{code}"
                the_list.append(
                    {
                        "id": entity_id,
                        "entity_id": entity_id,
                        "entity_type": "block",
                        "exchange": "cn",
                        "code": code,
                        "name": name,
                        "category": category.value,
                    }
                )
            if the_list:
                df = pd.DataFrame.from_records(the_list)
                df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, force_update=True)

            self.logger.info(f"finish record sina blocks:{category.value}")


class SinaChinaBlockStockRecorder(TimeSeriesDataRecorder):
    """Record the stock membership of each Sina block as BlockStock rows."""

    entity_provider = "sina"
    entity_schema = Block

    provider = "sina"
    data_schema = BlockStock

    # Stocks belonging to a block, paged (pages 1..4 are polled below).
    category_stocks_url = "http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/Market_Center.getHQNodeData?page={}&num=5000&sort=symbol&asc=1&node={}&symbol=&_s_r_a=page"

    def record(self, entity, start, end, size, timestamps):
        for page in range(1, 5):
            resp = requests.get(self.category_stocks_url.format(page, entity.code))
            try:
                # "null" body marks the page past the last one
                if resp.text == "null" or resp.text is None:
                    break
                category_jsons = demjson3.decode(resp.text)
                the_list = []
                for category in category_jsons:
                    stock_code = category["code"]
                    stock_id = china_stock_code_to_id(stock_code)
                    block_id = entity.id
                    the_list.append(
                        {
                            "id": "{}_{}".format(block_id, stock_id),
                            "entity_id": block_id,
                            "entity_type": "block",
                            "exchange": entity.exchange,
                            "code": entity.code,
                            "name": entity.name,
                            "timestamp": now_pd_timestamp(),
                            "stock_id": stock_id,
                            "stock_code": stock_code,
                            "stock_name": category["name"],
                        }
                    )
                if the_list:
                    df = pd.DataFrame.from_records(the_list)
                    df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, force_update=True)

                self.logger.info("finish recording BlockStock:{},{}".format(entity.category, entity.name))

            except Exception as e:
                # FIX: the previous call passed extra args to a message with no
                # %-placeholders, which makes the logging module raise an internal
                # formatting error and drop the record entirely.
                self.logger.error("error: %s, resp.text: %s", e, resp.text)
            self.sleep()
SinaChinaBlockStockRecorder(codes=["new_cbzz"]) + recorder.run() + + +# the __all__ is generated +__all__ = ["SinaBlockRecorder", "SinaChinaBlockStockRecorder"] diff --git a/src/zvt/recorders/sina/money_flow/__init__.py b/src/zvt/recorders/sina/money_flow/__init__.py new file mode 100644 index 00000000..f202b1fd --- /dev/null +++ b/src/zvt/recorders/sina/money_flow/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*-# + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub modules + +# import all from submodule sina_stock_money_flow_recorder +from .sina_stock_money_flow_recorder import * +from .sina_stock_money_flow_recorder import __all__ as _sina_stock_money_flow_recorder_all + +__all__ += _sina_stock_money_flow_recorder_all + +# import all from submodule sina_block_money_flow_recorder +from .sina_block_money_flow_recorder import * +from .sina_block_money_flow_recorder import __all__ as _sina_block_money_flow_recorder_all + +__all__ += _sina_block_money_flow_recorder_all diff --git a/src/zvt/recorders/sina/money_flow/sina_block_money_flow_recorder.py b/src/zvt/recorders/sina/money_flow/sina_block_money_flow_recorder.py new file mode 100644 index 00000000..39680c56 --- /dev/null +++ b/src/zvt/recorders/sina/money_flow/sina_block_money_flow_recorder.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +import time + +import requests + +from zvt.contract.recorder import FixedCycleDataRecorder +from zvt.domain import BlockMoneyFlow, BlockCategory, Block +from zvt.utils.time_utils import to_pd_timestamp +from zvt.utils.utils import to_float + + +# 实时资金流 +# 'http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/MoneyFlow.ssl_bkzj_bk?page=1&num=20&sort=netamount&asc=0&fenlei=1' +# 'http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/MoneyFlow.ssl_bkzj_bk?page=1&num=20&sort=netamount&asc=0&fenlei=0' + + +class 
class SinaBlockMoneyFlowRecorder(FixedCycleDataRecorder):
    """Record daily money-flow history for Sina stock blocks.

    Each row carries close, change percentage, turnover rate and the various
    net-inflow measures reported by the Sina block money-flow endpoint.
    """

    # where the entities come from
    entity_provider = "sina"
    # entity schema
    entity_schema = Block

    # where the records come from
    provider = "sina"
    # record schema
    data_schema = BlockMoneyFlow

    url = "http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/MoneyFlow.ssl_bkzj_zjlrqs?page=1&num={}&sort=opendate&asc=0&bankuai={}%2F{}"

    def generate_url(self, category, code, number):
        """Build the request url for a block.

        :raises ValueError: for categories other than industry/concept.
            (Previously an unknown category crashed with an unbound-local
            NameError instead.)
        """
        if category == BlockCategory.industry.value:
            block = 0
        elif category == BlockCategory.concept.value:
            block = 1
        else:
            raise ValueError(f"unsupported block category: {category}")

        return self.url.format(number, block, code)

    def get_data_map(self):
        return {}

    def record(self, entity, start, end, size, timestamps):
        url = self.generate_url(category=entity.category, code=entity.code, number=size)

        resp = requests.get(url)

        json_list = []
        try:
            # NOTE(review): the payload is a JS object literal, not strict JSON,
            # hence the eval; evaluating remote text is dangerous — consider a
            # tolerant parser such as demjson3.decode instead.
            json_list = eval(resp.text)
        except Exception:
            resp.encoding = "GBK"
            self.logger.error(resp.text)
            # best-effort: back off five minutes, then continue with no rows
            time.sleep(60 * 5)

        result_list = []
        for item in json_list:
            result_list.append(
                {
                    "name": entity.name,
                    "timestamp": to_pd_timestamp(item["opendate"]),
                    "close": to_float(item["avg_price"]),
                    "change_pct": to_float(item["avg_changeratio"]),
                    "turnover_rate": to_float(item["turnover"]) / 10000,
                    "net_inflows": to_float(item["netamount"]),
                    "net_inflow_rate": to_float(item["ratioamount"]),
                    "net_main_inflows": to_float(item["r0_net"]),
                    "net_main_inflow_rate": to_float(item["r0_ratio"]),
                }
            )

        return result_list


if __name__ == "__main__":
    SinaBlockMoneyFlowRecorder(codes=["new_fjzz"]).run()
    # SinaIndexMoneyFlowRecorder().run()


# the __all__ is generated
__all__ = ["SinaBlockMoneyFlowRecorder"]
# -*- coding: utf-8 -*-
import time

import requests

from zvt.contract.recorder import FixedCycleDataRecorder
from zvt.domain import StockMoneyFlow, Stock, StockTradeDay
from zvt.utils.time_utils import to_pd_timestamp, is_same_date, now_pd_timestamp
from zvt.utils.utils import to_float


class SinaStockMoneyFlowRecorder(FixedCycleDataRecorder):
    """Record per-stock daily money-flow history from Sina.

    Rows include close/change/turnover plus net inflows broken down by order
    size (super-large / large / medium / small).
    """

    # NOTE(review): entities come from the joinquant provider while records are
    # stored under sina — confirm this cross-provider wiring is intentional.
    entity_provider = "joinquant"
    entity_schema = Stock

    provider = "sina"
    data_schema = StockMoneyFlow

    url = "http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/MoneyFlow.ssl_qsfx_lscjfb?page=1&num={}&sort=opendate&asc=0&daima={}"

    def init_entities(self):
        super().init_entities()
        # filter out delisted stocks
        self.entities = [
            entity for entity in self.entities if (entity.end_date is None) or (entity.end_date > now_pd_timestamp())
        ]

    # TODO:more general for the case using StockTradeDay
    def evaluate_start_end_size_timestamps(self, entity):
        """Skip the fetch (size=0) when the latest stored trade day equals the
        computed start, i.e. the entity is already up to date."""
        start, end, size, timestamps = super().evaluate_start_end_size_timestamps(entity)
        if start:
            trade_day = StockTradeDay.query_data(limit=1, order=StockTradeDay.timestamp.desc(), return_type="domain")
            if trade_day:
                if is_same_date(trade_day[0].timestamp, start):
                    size = 0
        return start, end, size, timestamps

    def generate_url(self, code, number):
        # code is the exchange-prefixed symbol, e.g. "sz000406"
        return self.url.format(number, code)

    def get_data_map(self):
        return {}

    def record(self, entity, start, end, size, timestamps):
        param = {
            "url": self.generate_url(code="{}{}".format(entity.exchange, entity.code), number=size),
            "security_item": entity,
        }

        resp = requests.get(param["url"])
        # {opendate:"2019-04-29",trade:"10.8700",changeratio:"-0.0431338",turnover:"74.924",netamount:"-2903349.8500",
        # ratioamount:"-0.155177",r0:"0.0000",r1:"2064153.0000",r2:"6485031.0000",r3:"10622169.2100",r0_net:"0.0000",
        # r1_net:"2064153.0000",r2_net:"-1463770.0000",r3_net:"-3503732.8500"}
        opendate = "opendate"
        trade = "trade"
        changeratio = "changeratio"
        turnover = "turnover"
        netamount = "netamount"
        ratioamount = "ratioamount"
        r0 = "r0"
        r1 = "r1"
        r2 = "r2"
        r3 = "r3"
        r0_net = "r0_net"
        r1_net = "r1_net"
        r2_net = "r2_net"
        r3_net = "r3_net"

        json_list = []

        try:
            # NOTE(review): payload is a JS object-literal list, parsed via eval;
            # evaluating remote text is dangerous — consider demjson3 instead.
            json_list = eval(resp.text)
        except Exception as e:
            resp.encoding = "GBK"
            self.logger.error(resp.text)
            # best-effort: back off five minutes, then continue with no rows
            time.sleep(60 * 5)

        result_list = []
        for item in json_list:
            # total traded amount across all four order-size buckets
            amount = to_float(item["r0"]) + to_float(item["r1"]) + to_float(item["r2"]) + to_float(item["r3"])

            result = {
                "timestamp": to_pd_timestamp(item["opendate"]),
                "name": entity.name,
                "close": to_float(item["trade"]),
                "change_pct": to_float(item["changeratio"]),
                "turnover_rate": to_float(item["turnover"]) / 10000,
                "net_inflows": to_float(item["netamount"]),
                "net_inflow_rate": to_float(item["ratioamount"]),
                # # main force = super-large + large orders
                # net_main_inflows = Column(Float)
                # net_main_inflow_rate = Column(Float)
                # # super-large orders
                # net_huge_inflows = Column(Float)
                # net_huge_inflow_rate = Column(Float)
                # # large orders
                # net_big_inflows = Column(Float)
                # net_big_inflow_rate = Column(Float)
                #
                # # medium orders
                # net_medium_inflows = Column(Float)
                # net_medium_inflow_rate = Column(Float)
                # # small orders
                # net_small_inflows = Column(Float)
                # net_small_inflow_rate = Column(Float)
                "net_main_inflows": to_float(item["r0_net"]) + to_float(item["r1_net"]),
                "net_huge_inflows": to_float(item["r0_net"]),
                "net_big_inflows": to_float(item["r1_net"]),
                "net_medium_inflows": to_float(item["r2_net"]),
                "net_small_inflows": to_float(item["r3_net"]),
            }

            # rate fields are only meaningful when any volume traded
            if amount != 0:
                result["net_main_inflow_rate"] = (to_float(item["r0_net"]) + to_float(item["r1_net"])) / amount
                result["net_huge_inflow_rate"] = to_float(item["r0_net"]) / amount
                result["net_big_inflow_rate"] = to_float(item["r1_net"]) / amount
                result["net_medium_inflow_rate"] = to_float(item["r2_net"]) / amount
                result["net_small_inflow_rate"] = to_float(item["r3_net"]) / amount

            result_list.append(result)

        return result_list


if __name__ == "__main__":
    SinaStockMoneyFlowRecorder(codes=["000406"]).run()
    # SinaStockMoneyFlowRecorder().run()


# the __all__ is generated
__all__ = ["SinaStockMoneyFlowRecorder"]
class ChinaETFDayKdataRecorder(FixedCycleDataRecorder):
    """Record daily ETF kdata from Sina, backfilling cumulative net value
    (累计净值) from Eastmoney once each entity's bars are recorded."""

    entity_provider = "exchange"
    entity_schema = Etf

    provider = "sina"
    data_schema = Etf1dKdata
    url = (
        "http://money.finance.sina.com.cn/quotes_service/api/json_v2.php/CN_MarketData.getKLineData?"
        "symbol={}{}&scale=240&&datalen={}&ma=no"
    )

    def get_data_map(self):
        return {}

    def generate_domain_id(self, entity, original_data):
        return generate_kdata_id(entity_id=entity.id, timestamp=original_data["timestamp"], level=self.level)

    def on_finish_entity(self, entity):
        """Fill cumulative net value / change pct for rows that still lack it."""
        kdatas = get_kdata(
            entity_id=entity.id,
            level=IntervalLevel.LEVEL_1DAY.value,
            order=Etf1dKdata.timestamp.asc(),
            return_type="domain",
            session=self.session,
            filters=[Etf1dKdata.cumulative_net_value.is_(None)],
        )

        if kdatas and len(kdatas) > 0:
            start = kdatas[0].timestamp
            end = kdatas[-1].timestamp

            # fetch the fund's cumulative net value from Eastmoney
            df = self.fetch_cumulative_net_value(entity, start, end)

            if df is not None and not df.empty:
                for kdata in kdatas:
                    if kdata.timestamp in df.index:
                        kdata.cumulative_net_value = df.loc[kdata.timestamp, "LJJZ"]
                        kdata.change_pct = df.loc[kdata.timestamp, "JZZZL"]
                self.session.commit()
                self.logger.info(f"{entity.code} - {entity.name}累计净值更新完成...")

    def fetch_cumulative_net_value(self, security_item, start, end) -> pd.DataFrame:
        """Page through Eastmoney's fund net-value history and return a frame
        indexed by date (FSRQ) with LJJZ/JZZZL columns."""
        query_url = (
            "http://api.fund.eastmoney.com/f10/lsjz?" "fundCode={}&pageIndex={}&pageSize=200&startDate={}&endDate={}"
        )

        page = 1
        df = pd.DataFrame()
        while True:
            url = query_url.format(security_item.code, page, to_time_str(start), to_time_str(end))

            response = requests.get(url, headers=EASTMONEY_ETF_NET_VALUE_HEADER)
            response_json = demjson3.decode(response.text)
            response_df = pd.DataFrame(response_json["Data"]["LSJZList"])

            # an empty page marks the end of pagination
            if response_df.empty:
                break

            response_df["FSRQ"] = pd.to_datetime(response_df["FSRQ"])
            response_df["JZZZL"] = pd.to_numeric(response_df["JZZZL"], errors="coerce")
            response_df["LJJZ"] = pd.to_numeric(response_df["LJJZ"], errors="coerce")
            response_df = response_df.fillna(0)
            response_df.set_index("FSRQ", inplace=True, drop=True)

            df = pd.concat([df, response_df])
            page += 1

            self.sleep()

        return df

    def record(self, entity, start, end, size, timestamps):
        # The endpoint is not paged; when more rows than we can request are
        # wanted, cap at its maximum row count.
        if start is None or size > self.default_size:
            size = 8000

        # FIX: dropped the pointless `param` dict round-trip that just renamed
        # `entity` and `size` back to themselves.
        url = ChinaETFDayKdataRecorder.url.format(entity.exchange, entity.code, size)

        response = requests.get(url)
        response_json = demjson3.decode(response.text)

        if response_json is None or len(response_json) == 0:
            return []

        df = pd.DataFrame(response_json)
        df.rename(columns={"day": "timestamp"}, inplace=True)
        df["timestamp"] = pd.to_datetime(df["timestamp"])
        df["name"] = entity.name
        df["provider"] = "sina"
        df["level"] = self.level.value

        return df.to_dict(orient="records")


if __name__ == "__main__":
    init_log("sina_china_etf_day_kdata.log")
    ChinaETFDayKdataRecorder(level=IntervalLevel.LEVEL_1DAY).run()


# FIX: __all__ was previously assigned twice (once before __main__, once after);
# keep the single generated assignment.
# the __all__ is generated
__all__ = ["ChinaETFDayKdataRecorder"]
b/src/zvt/recorders/sina/quotes/sina_index_kdata_recorder.py new file mode 100644 index 00000000..46cdeede --- /dev/null +++ b/src/zvt/recorders/sina/quotes/sina_index_kdata_recorder.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- + +import time + +import pandas as pd +import requests + +from zvt.api.kdata import generate_kdata_id, get_kdata_schema +from zvt.contract import IntervalLevel, AdjustType +from zvt.contract.recorder import FixedCycleDataRecorder +from zvt.domain import Index, IndexKdataCommon +from zvt.utils.time_utils import get_year_quarters, is_same_date + + +class ChinaIndexDayKdataRecorder(FixedCycleDataRecorder): + entity_provider = "exchange" + entity_schema = Index + + provider = "sina" + data_schema = IndexKdataCommon + url = "http://vip.stock.finance.sina.com.cn/corp/go.php/vMS_MarketHistory/stockid/{}/type/S.phtml?year={}&jidu={}" + + def __init__( + self, + force_update=True, + sleeping_time=10, + exchanges=None, + entity_id=None, + entity_ids=None, + code=None, + codes=None, + day_data=False, + entity_filters=None, + ignore_failed=True, + real_time=False, + fix_duplicate_way="ignore", + start_timestamp=None, + end_timestamp=None, + level=IntervalLevel.LEVEL_1DAY, + kdata_use_begin_time=False, + one_day_trading_minutes=24 * 60, + return_unfinished=False, + ) -> None: + level = IntervalLevel(level) + self.adjust_type = AdjustType.qfq + self.entity_type = self.entity_schema.__name__.lower() + + self.data_schema = get_kdata_schema(entity_type=self.entity_type, level=level, adjust_type=self.adjust_type) + + super().__init__( + force_update, + sleeping_time, + exchanges, + entity_id, + entity_ids, + code, + codes, + day_data, + entity_filters, + ignore_failed, + real_time, + fix_duplicate_way, + start_timestamp, + end_timestamp, + level, + kdata_use_begin_time, + one_day_trading_minutes, + return_unfinished, + ) + + def get_data_map(self): + return {} + + def generate_domain_id(self, entity, original_data): + return generate_kdata_id(entity.id, 
timestamp=original_data["timestamp"], level=self.level) + + def record(self, entity, start, end, size, timestamps): + the_quarters = get_year_quarters(start) + if not is_same_date(entity.timestamp, start) and len(the_quarters) > 1: + the_quarters = the_quarters[1:] + + param = {"security_item": entity, "quarters": the_quarters, "level": self.level.value} + + security_item = param["security_item"] + quarters = param["quarters"] + level = param["level"] + + result_df = pd.DataFrame() + for year, quarter in quarters: + query_url = self.url.format(security_item.code, year, quarter) + response = requests.get(query_url) + response.encoding = "gbk" + + try: + dfs = pd.read_html(response.text) + except ValueError as error: + self.logger.error(f"skip ({year}-{quarter:02d}){security_item.code}{security_item.name}({error})") + time.sleep(10.0) + continue + + if len(dfs) < 5: + time.sleep(10.0) + continue + + df = dfs[4].copy() + df = df.iloc[1:] + df.columns = ["timestamp", "open", "high", "close", "low", "volume", "turnover"] + df["name"] = security_item.name + df["level"] = level + df["timestamp"] = pd.to_datetime(df["timestamp"]) + df["provider"] = "sina" + + result_df = pd.concat([result_df, df]) + + self.logger.info(f"({security_item.code}{security_item.name})({year}-{quarter:02d})") + time.sleep(10.0) + + result_df = result_df.sort_values(by="timestamp") + + return result_df.to_dict(orient="records") + + +__all__ = ["ChinaIndexDayKdataRecorder"] + +if __name__ == "__main__": + ChinaIndexDayKdataRecorder().run() + + +# the __all__ is generated +__all__ = ["ChinaIndexDayKdataRecorder"] diff --git a/src/zvt/recorders/wb/__init__.py b/src/zvt/recorders/wb/__init__.py new file mode 100644 index 00000000..590ebdeb --- /dev/null +++ b/src/zvt/recorders/wb/__init__.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] + +# __init__.py structure: +# common code of the package +# export interface in __all__ which contains __all__ of its sub 
modules + +# import all from submodule wb_economy_recorder +from .wb_economy_recorder import * +from .wb_economy_recorder import __all__ as _wb_economy_recorder_all + +__all__ += _wb_economy_recorder_all + +# import all from submodule wb_country_recorder +from .wb_country_recorder import * +from .wb_country_recorder import __all__ as _wb_country_recorder_all + +__all__ += _wb_country_recorder_all + +# import all from submodule wb_api +from .wb_api import * +from .wb_api import __all__ as _wb_api_all + +__all__ += _wb_api_all diff --git a/src/zvt/recorders/wb/wb_api.py b/src/zvt/recorders/wb/wb_api.py new file mode 100644 index 00000000..a37349fd --- /dev/null +++ b/src/zvt/recorders/wb/wb_api.py @@ -0,0 +1,265 @@ +# -*- coding: utf-8 -*- +import itertools +import re +from copy import copy + +import pandas as pd +import requests + +from zvt.contract.api import get_entity_code +from zvt.utils.pd_utils import normal_index_df +from zvt.utils.time_utils import to_pd_timestamp + +WORLD_BANK_URL = "http://api.worldbank.org/v2" + +# thanks to https://github.com/mwouts/world_bank_data + +_economy_indicator_map = { + "population": "SP.POP.TOTL", + "gdp": "NY.GDP.MKTP.CD", + "gdp_per_capita": "NY.GDP.PCAP.CD", + "gdp_per_employed": "SL.GDP.PCAP.EM.KD", + "gdp_growth": "NY.GDP.MKTP.KD.ZG", + "agriculture_growth": "NV.AGR.TOTL.KD.ZG", + "industry_growth": "NV.IND.TOTL.KD.ZG", + "manufacturing_growth": "NV.IND.MANF.KD.ZG", + "service_growth": "NV.SRV.TOTL.KD.ZG", + "consumption_growth": "NE.CON.TOTL.KD.ZG", + "capital_growth": "NE.GDI.TOTL.KD.ZG", + "exports_growth": "NE.EXP.GNFS.KD.ZG", + "imports_growth": "NE.IMP.GNFS.KD.ZG", + "gni": "NY.GNP.ATLS.CD", + "gni_per_capita": "NY.GNP.PCAP.CD", + "gross_saving": "NY.GNS.ICTR.ZS", + "cpi": "FP.CPI.TOTL", + "unemployment_rate": "SL.UEM.TOTL.ZS", + "fdi_of_gdp": "BX.KLT.DINV.WD.GD.ZS", +} + + +def _collapse(values): + """Collapse multiple values to a colon-separated list of values""" + if isinstance(values, str): + return values + if 
values is None: + return "all" + if isinstance(values, list): + return ";".join([_collapse(v) for v in values]) + return str(values) + + +def _extract_preferred_field(data, id_or_value): + """In case the preferred representation of data when the latter has multiple representations""" + if not id_or_value: + return data + + if not data: + return "" + + if isinstance(data, dict): + if id_or_value in data: + return data[id_or_value] + + if isinstance(data, list): + return ",".join([_extract_preferred_field(i, id_or_value) for i in data]) + + return data + + +def _wb_get(paths: dict = None, **kwargs): + params = copy(kwargs) + params.setdefault("format", "json") + params.setdefault("per_page", 20000) + + url = "/".join([WORLD_BANK_URL] + list(itertools.chain.from_iterable([(k, _collapse(paths[k])) for k in paths]))) + + response = requests.get(url=url, params=params) + response.raise_for_status() + try: + data = response.json() + except ValueError: + raise ValueError( + "{msg}\nurl={url}\nparams={params}".format(msg=_extract_message(response.text), url=url, params=params) + ) + if isinstance(data, list) and data and "message" in data[0]: + try: + msg = data[0]["message"][0]["value"] + except (KeyError, IndexError): + msg = str(msg) + + raise ValueError("{msg}\nurl={url}\nparams={params}".format(msg=msg, url=url, params=params)) + + # Redo the request and get the full information when the first response is incomplete + if isinstance(data, list): + page_information, data = data + if "page" not in params: + current_page = 1 + while current_page < int(page_information["pages"]): + params["page"] = current_page = int(page_information["page"]) + 1 + response = requests.get(url=url, params=params) + response.raise_for_status() + page_information, new_data = response.json() + data.extend(new_data) + + if not data: + raise RuntimeError("The request returned no data:\nurl={url}\nparams={params}".format(url=url, params=params)) + + return data + + +def _extract_message(msg): + 
"""' + + The indicator was not found. It may have been deleted or archived. + '""" + if "wb:message" not in msg: + return msg + return re.sub( + re.compile(".*]*>", re.DOTALL), "", re.sub(re.compile(".*", re.DOTALL), "", msg) + ) + + +def _get_meta(name, filters=None, expected=None, **params): + """Request data and return it in the form of a data frame""" + filters = _collapse(filters) + id_or_value = "value" + + if expected and id_or_value not in expected: + raise ValueError("'id_or_value' should be one of '{}'".format("', '".join(expected))) + + data = _wb_get(paths={name: filters}, **params) + + # We get a list (countries) of dictionary (properties) + columns = data[0].keys() + records = {} + + for col in columns: + records[col] = [_extract_preferred_field(cnt[col], id_or_value) for cnt in data] + + return pd.DataFrame(records, columns=columns) + + +def get_countries(): + df = _get_meta("country", expected=["id", "iso2code", "value"]) + + for col in ["latitude", "longitude"]: + df[col] = pd.to_numeric(df[col]) + df.rename( + columns={ + "iso2Code": "code", + "incomeLevel": "income_level", + "lendingType": "lending_type", + "capitalCity": "capital_city", + }, + inplace=True, + ) + df["entity_type"] = "country" + df["exchange"] = "galaxy" + df["entity_id"] = df[["entity_type", "exchange", "code"]].apply(lambda x: "_".join(x.astype(str)), axis=1) + df["id"] = df["entity_id"] + return df + + +def get_indicators(indicator=None, language=None, id_or_value=None, **params): + """Return a DataFrame that describes one, multiple or all indicators, indexed by the indicator id. 
+ :param indicator: None (all indicators), the id of an indicator, or a list of multiple ids + :param language: Desired language + :param id_or_value: Choose either 'id' or 'value' for columns 'source' and 'topics'""" + + if id_or_value == "iso2code": + id_or_value = "id" + + return _get_meta( + "indicator", indicator, language=language, id_or_value=id_or_value, expected=["id", "value"], **params + ) + + +def get_indicator_data(indicator, indicator_name=None, country=None, date=None): + datas = _wb_get(paths={"country": country, "indicator": indicator}, date=date) + records = [ + { + "code": item["country"]["id"], + "timestamp": to_pd_timestamp(item["date"]), + item["indicator"]["id"] if not indicator_name else indicator_name: item["value"], + } + for item in datas + ] + df = pd.DataFrame.from_records(data=records) + df = df.set_index(["code", "timestamp"]) + return df + + +def get_regions(region=None, language=None, **params): + """Return a DataFrame that describes one, multiple or all regions, indexed by the region id. + :param region: None (all regions), the id of a region, or a list of multiple ids + :param language: Desired language""" + return _get_meta("region", region, language, **params) + + +def get_sources(source=None, language=None, **params): + """Return a DataFrame that describes one, multiple or all sources, indexed by the source id. + :param source: None (all sources), the id of a source, or a list of multiple ids + :param language: Desired language""" + return _get_meta("source", source, language, **params) + + +def get_topics(topic=None, language=None, **params): + """Return a DataFrame that describes one, multiple or all sources, indexed by the source id. 
+ :param topic: None (all topics), the id of a topic, or a list of multiple ids + :param language: Desired language""" + return _get_meta("topic", topic, language, **params) + + +def get_incomelevels(incomelevel=None, language=None, **params): + """Return a DataFrame that describes one, multiple or all income levels, indexed by the IL id. + :param incomelevel: None (all income levels), the id of an income level, or a list of multiple ids + :param language: Desired language""" + return _get_meta("incomelevel", incomelevel, language, **params) + + +def get_lendingtypes(lendingtype=None, language=None, **params): + """Return a DataFrame that describes one, multiple or all lending types, indexed by the LT id. + :param lendingtype: None (all lending types), the id of a lending type, or a list of multiple ids + :param language: Desired language""" + return _get_meta("lendingtype", lendingtype, language, **params) + + +def get_economy_data(entity_id, indicators=None, date=None): + country = get_entity_code(entity_id=entity_id) + if not indicators: + indicators = _economy_indicator_map.keys() + dfs = [] + for indicator in indicators: + data = get_indicator_data( + indicator=_economy_indicator_map.get(indicator), indicator_name=indicator, country=country, date=date + ) + dfs.append(data) + df = pd.concat(dfs, axis=1) + df = df.reset_index(drop=False) + df["entity_id"] = entity_id + df["id"] = df[["entity_id", "timestamp"]].apply(lambda x: "_".join(x.astype(str)), axis=1) + df = normal_index_df(df, drop=False) + return df + + +if __name__ == "__main__": + # df = get_countries() + # print(df) + df = get_economy_data(entity_id="country_galaxy_CN") + print(df) + # df = get_sources() + # print(df) + + +# the __all__ is generated +__all__ = [ + "get_countries", + "get_indicators", + "get_indicator_data", + "get_regions", + "get_sources", + "get_topics", + "get_incomelevels", + "get_lendingtypes", + "get_economy_data", +] diff --git a/src/zvt/recorders/wb/wb_country_recorder.py 
b/src/zvt/recorders/wb/wb_country_recorder.py new file mode 100644 index 00000000..87ba7335 --- /dev/null +++ b/src/zvt/recorders/wb/wb_country_recorder.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import Recorder +from zvt.domain.meta.country_meta import Country +from zvt.recorders.wb import wb_api + + +class WBCountryRecorder(Recorder): + provider = "wb" + data_schema = Country + + def run(self): + df = wb_api.get_countries() + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + + +if __name__ == "__main__": + recorder = WBCountryRecorder() + recorder.run() + + +# the __all__ is generated +__all__ = ["WBCountryRecorder"] diff --git a/src/zvt/recorders/wb/wb_economy_recorder.py b/src/zvt/recorders/wb/wb_economy_recorder.py new file mode 100644 index 00000000..d830c6d8 --- /dev/null +++ b/src/zvt/recorders/wb/wb_economy_recorder.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +from zvt.contract.api import df_to_db +from zvt.contract.recorder import FixedCycleDataRecorder +from zvt.domain import Country, Economy +from zvt.recorders.wb import wb_api +from zvt.utils.time_utils import current_date + + +class WBEconomyRecorder(FixedCycleDataRecorder): + entity_schema = Country + data_schema = Economy + entity_provider = "wb" + provider = "wb" + + def record(self, entity, start, end, size, timestamps): + date = None + if start: + date = f"{start.year}:{current_date().year}" + try: + df = wb_api.get_economy_data(entity_id=entity.id, date=date) + df["name"] = entity.name + df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) + # 一些地方获取不到数据会报错 + except Exception as e: + self.logger.warning(f"Failed to get {entity.name} economy data", e) + + +if __name__ == "__main__": + entity_ids = ["country_galaxy_CN", "country_galaxy_US"] + r = WBEconomyRecorder(entity_ids=entity_ids) + r.run() + + +# the __all__ is 
generated +__all__ = ["WBEconomyRecorder"] diff --git a/src/zvt/resources/concept_main_tag_mapping.json b/src/zvt/resources/concept_main_tag_mapping.json new file mode 100644 index 00000000..c944afd3 --- /dev/null +++ b/src/zvt/resources/concept_main_tag_mapping.json @@ -0,0 +1,399 @@ +{ + "广电": "文化传媒", + "数字阅读": "文化传媒", + "网络游戏": "文化传媒", + "手游概念": "文化传媒", + "影视概念": "文化传媒", + "可燃冰": "资源", + "油价相关": "资源", + "低碳冶金": "资源", + "基本金属": "资源", + "黄金概念": "资源", + "天然气": "资源", + "稀缺资源": "资源", + "页岩气": "资源", + "煤化工": "资源", + "油气设服": "资源", + "小金属概念": "资源", + "新能源": "新能源", + "BC电池": "新能源", + "TOPCon电池": "新能源", + "钒电池": "新能源", + "钙钛矿电池": "新能源", + "麒麟电池": "新能源", + "动力电池回收": "新能源", + "钠离子电池": "新能源", + "固态电池": "新能源", + "刀片电池": "新能源", + "HIT电池": "新能源", + "燃料电池": "新能源", + "锂电池": "新能源", + "储能": "新能源", + "充电桩": "新能源", + "熔盐储能": "新能源", + "换电概念": "新能源", + "盐湖提锂": "新能源", + "风能": "新能源", + "太阳能": "新能源", + "氢能源": "新能源", + "抽水蓄能": "新能源", + "光伏建筑一体化": "新能源", + "人造太阳": "新能源", + "可控核聚变": "新能源", + "核能核电": "新能源", + "绿色电力": "电力", + "特高压": "电力", + "虚拟电厂": "电力", + "智能电网": "电力", + "超超临界发电": "电力", + "半导体概念": "半导体", + "中芯概念": "半导体", + "存储芯片": "半导体", + "AI芯片": "半导体", + "汽车芯片": "半导体", + "国产芯片": "半导体", + "第三代半导体": "半导体", + "第四代半导体": "半导体", + "英伟达概念": "半导体", + "铜缆高速连接": "半导体", + "玻璃基板": "半导体", + "高带宽内存": "半导体", + "Chiplet概念": "半导体", + "光刻胶": "半导体", + "氮化镓": "半导体", + "EDA概念": "半导体", + "IGBT概念": "半导体", + "PCB": "半导体", + "VPN": "通信", + "IPv6": "通信", + "WiFi": "通信", + "毫米波概念": "通信", + "5G概念": "通信", + "6G概念": "通信", + "F5G概念": "通信", + "量子通信": "通信", + "新能源车": "汽车", + "华为汽车": "汽车", + "汽车拆解": "汽车", + "汽车一体化压铸": "汽车", + "小米汽车": "汽车", + "汽车热管理": "汽车", + "电子后视镜": "汽车", + "高压快充": "汽车", + "车联网": "汽车", + "激光雷达": "汽车", + "特斯拉": "汽车", + "EDR概念": "汽车", + "无人驾驶": "汽车", + "ETC": "汽车", + "物联网": "智能机器", + "2025规划": "智能机器", + "智能机器": "智能机器", + "工业互联": "智能机器", + "轮毂电机": "智能机器", + "发电机概念": "智能机器", + "同步磁阻电机": "智能机器", + "机器人执行器": "智能机器", + "新型工业化": "智能机器", + "工业母机": "智能机器", + "工业4.0": "智能机器", + "减速器": "智能机器", + "机器人概念": "智能机器", + 
"PLC概念": "智能机器", + "机器视觉": "智能机器", + "生物医药": "医药", + "痘病毒防治": "医药", + "地塞米松": "医药", + "消毒剂": "医药", + "口罩": "医药", + "肝素概念": "医药", + "健康中国": "医药", + "幽门螺杆菌概念": "医药", + "代糖概念": "医药", + "医疗器械概念": "医药", + "生物疫苗": "医药", + "维生素": "医药", + "注射器概念": "医药", + "流感": "医药", + "AI制药": "医药", + "中药概念": "医药", + "减肥药": "医药", + "创新药": "医药", + "新冠药物": "医药", + "长寿药": "医药", + "独家药品": "医药", + "病毒防治": "医药", + "SPD概念": "医药", + "辅助生殖": "医药", + "肝炎概念": "医药", + "蒙脱石散": "医药", + "血氧仪": "医药", + "熊去氧胆酸": "医药", + "抗原检测": "医药", + "抗菌面料": "医药", + "千金藤素": "医药", + "DRG/DIP": "医药", + "CRO": "医药", + "阿兹海默": "医药", + "CAR-T细胞疗法": "医药", + "新冠检测": "医药", + "青蒿素": "医药", + "超级真菌": "医药", + "气溶胶检测": "医药", + "重组蛋白": "医药", + "疫苗冷链": "医药", + "精准医疗": "医药", + "单抗概念": "医药", + "免疫治疗": "医药", + "基因测序": "医药", + "体外诊断": "医药", + "互联医疗": "医药", + "人脑工程": "医药", + "啤酒概念": "大消费", + "进口博览": "大消费", + "退税商店": "大消费", + "拼多多概念": "大消费", + "抖音小店": "大消费", + "乳业": "大消费", + "C2M概念": "大消费", + "调味品概念": "大消费", + "毛发医疗": "大消费", + "化妆品概念": "大消费", + "白酒": "大消费", + "医疗美容": "大消费", + "户外露营": "大消费", + "在线旅游": "大消费", + "跨境电商": "大消费", + "电商概念": "大消费", + "新零售": "大消费", + "智能家居": "大消费", + "网红直播": "大消费", + "免税概念": "大消费", + "预制菜概念": "大消费", + "培育钻石": "大消费", + "婴童概念": "大消费", + "托育服务": "大消费", + "智慧灯杆": "消费电子", + "UWB概念": "消费电子", + "电子纸概念": "消费电子", + "胎压监测": "消费电子", + "3D玻璃": "消费电子", + "屏下摄像": "消费电子", + "超清视频": "消费电子", + "植物照明": "消费电子", + "LED": "消费电子", + "3D摄像头": "消费电子", + "eSIM": "消费电子", + "蓝宝石": "消费电子", + "无线耳机": "消费电子", + "智能穿戴": "消费电子", + "AI手机": "消费电子", + "AIPC": "消费电子", + "柔性屏(折叠屏)": "消费电子", + "星闪概念": "消费电子", + "传感器": "消费电子", + "被动元件": "消费电子", + "小米概念": "消费电子", + "无线充电": "消费电子", + "智能电视": "消费电子", + "空间计算": "消费电子", + "裸眼3D": "消费电子", + "混合现实": "消费电子", + "增强现实": "消费电子", + "虚拟现实": "消费电子", + "MicroLED": "消费电子", + "MiniLED": "消费电子", + "OLED": "消费电子", + "化工原料": "化工", + "环氧丙烷": "化工", + "PVDF概念": "化工", + "新材料": "化工", + "MLCC": "化工", + "碳纤维": "化工", + "PEEK材料概念": "化工", + "磷化工": "化工", + "碳基材料": "化工", + "纳米银": "化工", + "碳化硅": "化工", + "复合集流体": "化工", + "有机硅": "化工", + 
"稀土永磁": "化工", + "石墨烯": "化工", + "氟化工": "化工", + "草甘膦": "化工", + "钛白粉": "化工", + "降解塑料": "化工", + "工业气体": "化工", + "氦气概念": "化工", + "超级电容": "化工", + "军民融合": "军工", + "海工装备": "军工", + "军工": "军工", + "航母概念": "军工", + "国家安防": "军工", + "空间站概念": "军工", + "大飞机": "军工", + "ERP概念": "AI", + "数字哨兵": "AI", + "电子身份证": "AI", + "电子车牌": "AI", + "大数据": "AI", + "智慧城市": "AI", + "云计算": "AI", + "国产软件": "AI", + "生物识别": "AI", + "RCS概念": "AI", + "远程办公": "AI", + "在线教育": "AI", + "百度概念": "AI", + "人工智能": "AI", + "液冷概念": "AI", + "光通信模块": "AI", + "CPO概念": "AI", + "AI语料": "AI", + "Kimi概念": "AI", + "Sora概念": "AI", + "短剧互动游戏": "AI", + "多模态AI": "AI", + "数据要素": "AI", + "算力概念": "AI", + "MLOps概念": "AI", + "ChatGPT概念": "AI", + "AIGC概念": "AI", + "数据确权": "AI", + "Web3.0": "AI", + "虚拟数字人": "AI", + "数字水印": "AI", + "数据安全": "AI", + "云游戏": "AI", + "数字孪生": "AI", + "边缘计算": "AI", + "数据中心": "AI", + "华为概念": "AI", + "鸿蒙概念": "AI", + "华为欧拉": "AI", + "华为昇腾": "AI", + "国资云概念": "AI", + "东数西算": "AI", + "网络安全": "AI", + "元宇宙概念": "AI", + "NFT概念": "AI", + "信创": "AI", + "数字经济": "AI", + "区块链": "AI", + "智慧政务": "AI", + "数字货币": "AI", + "电子竞技": "AI", + "知识产权": "AI", + "时空大数据": "AI", + "低空经济": "低空经济", + "飞行汽车(eVTOL)": "低空经济", + "无人机": "低空经济", + "建筑节能": "房地产", + "REITs概念": "房地产", + "租售同权": "房地产", + "铁路基建": "房地产", + "PPP模式": "房地产", + "工程机械概念": "房地产", + "新型城镇化": "房地产", + "装配建筑": "房地产", + "地下管网": "房地产", + "民爆概念": "房地产", + "参股期货": "金融", + "参股券商": "金融", + "参股保险": "金融", + "跨境支付": "金融", + "互联金融": "金融", + "券商概念": "金融", + "移动支付": "金融", + "参股银行": "金融", + "粮食概念": "农业", + "水产养殖": "农业", + "生态农业": "农业", + "蝗虫防治": "农业", + "农业种植": "农业", + "鸡肉概念": "农业", + "转基因": "农业", + "人造肉": "农业", + "食品安全": "农业", + "猪肉概念": "农业", + "生物质能发电": "公用", + "噪声防治": "公用", + "土壤修复": "公用", + "地热能": "公用", + "海绵城市": "公用", + "节能环保": "公用", + "尾气治理": "公用", + "职业教育": "公用", + "医废处理": "公用", + "快递概念": "物流", + "RCEP概念": "物流", + "央企改革": "国企", + "中特估": "国企", + "中字头": "国企", + "沪企改革": "国企", + "国企改革": "国企", + "世界杯": "其他", + "东盟自贸区概念": "其他", + "娃哈哈概念": "其他", + "空气能热泵": "其他", + "核酸采样亭": "其他", + "中俄贸易概念": 
"其他", + "净水概念": "其他", + "京津冀": "其他", + "低价股": "其他", + "商汤概念": "其他", + "粤港自贸": "其他", + "土地流转": "其他", + "壳资源": "其他", + "盲盒经济": "其他", + "内贸流通": "其他", + "京东金融": "其他", + "乡村振兴": "其他", + "东北振兴": "其他", + "社区团购": "其他", + "地摊经济": "其他", + "快手概念": "其他", + "蚂蚁概念": "其他", + "证金持股": "其他", + "养老概念": "其他", + "冷链物流": "其他", + "贬值受益": "其他", + "纾困概念": "其他", + "阿里概念": "其他", + "深圳特区": "其他", + "超级品牌": "其他", + "中超概念": "其他", + "养老金": "其他", + "专精特新": "其他", + "统一大市场": "其他", + "光伏高速公路": "其他", + "核污染防治": "其他", + "磁悬浮概念": "其他", + "垃圾分类": "其他", + "电子烟": "其他", + "工业大麻": "其他", + "全息技术": "其他", + "超导概念": "其他", + "北交所概念": "其他", + "赛马概念": "其他", + "体育产业": "其他", + "雄安新区": "其他", + "共享经济": "其他", + "彩票概念": "其他", + "苹果概念": "其他", + "供销社概念": "其他", + "水利建设": "其他", + "3D打印": "其他", + "创投": "其他", + "字节概念": "其他", + "海洋经济": "其他", + "上海自贸": "其他", + "一带一路": "其他", + "碳交易": "其他", + "宠物经济": "其他", + "航天概念": "商业航天", + "天基互联": "商业航天", + "北斗导航": "商业航天", + "通用航空": "商业航天" +} \ No newline at end of file diff --git a/src/zvt/resources/industry_main_tag_mapping.json b/src/zvt/resources/industry_main_tag_mapping.json new file mode 100644 index 00000000..b0c0a1c4 --- /dev/null +++ b/src/zvt/resources/industry_main_tag_mapping.json @@ -0,0 +1,88 @@ +{ + "风电设备": "新能源", + "电池": "新能源", + "光伏设备": "新能源", + "能源金属": "新能源", + "电源设备": "新能源", + "半导体": "半导体", + "电子化学品": "半导体", + "医疗服务": "医药", + "中药": "医药", + "化学制药": "医药", + "生物制品": "医药", + "医药商业": "医药", + "医疗器械": "医疗器械", + "贸易行业": "大消费", + "家用轻工": "大消费", + "造纸印刷": "大消费", + "酿酒行业": "大消费", + "珠宝首饰": "大消费", + "美容护理": "大消费", + "食品饮料": "大消费", + "旅游酒店": "大消费", + "商业百货": "大消费", + "纺织服装": "大消费", + "家电行业": "大消费", + "小金属": "资源", + "贵金属": "资源", + "有色金属": "资源", + "煤炭行业": "资源", + "石油行业": "资源", + "燃气": "资源", + "采掘行业": "资源", + "消费电子": "消费电子", + "电子元件": "消费电子", + "光学光电子": "消费电子", + "汽车零部件": "汽车", + "汽车服务": "汽车", + "汽车整车": "汽车", + "电机": "智能机器", + "通用设备": "智能机器", + "专用设备": "智能机器", + "仪器仪表": "智能机器", + "电网设备": "电力", + "电力行业": "电力", + "房地产开发": "房地产", + "房地产服务": "房地产", + "工程建设": "房地产", + "水泥建材": "房地产", + "装修装饰": 
"房地产", + "装修建材": "房地产", + "工程咨询服务": "房地产", + "钢铁行业": "房地产", + "工程机械": "房地产", + "非金属材料": "化工", + "包装材料": "化工", + "化学制品": "化工", + "化肥行业": "化工", + "化学原料": "化工", + "化纤行业": "化工", + "塑料制品": "化工", + "玻璃玻纤": "化工", + "橡胶制品": "化工", + "交运设备": "公用", + "航运港口": "公用", + "公用事业": "公用", + "航空机场": "公用", + "环保行业": "公用", + "铁路公路": "公用", + "证券": "金融", + "保险": "金融", + "银行": "金融", + "多元金融": "金融", + "通信服务": "通信", + "通信设备": "通信", + "互联网服务": "AI", + "软件开发": "AI", + "计算机设备": "AI", + "文化传媒": "文化传媒", + "教育": "文化传媒", + "游戏": "文化传媒", + "农牧饲渔": "农业", + "农药兽药": "农业", + "物流行业": "物流", + "航天航空": "商业航天", + "船舶制造": "军工", + "专业服务": "专业服务", + "综合行业": "综合行业" +} \ No newline at end of file diff --git a/src/zvt/resources/log_conf.yaml b/src/zvt/resources/log_conf.yaml new file mode 100644 index 00000000..c23289d7 --- /dev/null +++ b/src/zvt/resources/log_conf.yaml @@ -0,0 +1,42 @@ +version: 1 +disable_existing_loggers: False +formatters: + default: + # "()": uvicorn.logging.DefaultFormatter + format: '%(asctime)s %(levelname)s %(threadName)s %(message)s' + access: + # "()": uvicorn.logging.AccessFormatter + format: '%(asctime)s %(levelname)s %(threadName)s %(message)s' +handlers: + default: + formatter: default + class: logging.StreamHandler + stream: ext://sys.stderr + file: + class: logging.handlers.RotatingFileHandler + formatter: default + filename: server.log + maxBytes: 524288000 + level: INFO + backupCount: 10 + access: + formatter: access + class: logging.StreamHandler + stream: ext://sys.stdout +loggers: + uvicorn.error: + level: INFO + handlers: + - default + propagate: no + uvicorn.access: + level: INFO + handlers: + - access + propagate: no +root: + level: INFO + handlers: + - default + - file + propagate: no \ No newline at end of file diff --git a/src/zvt/resources/missed_concept.json b/src/zvt/resources/missed_concept.json new file mode 100644 index 00000000..61179f78 --- /dev/null +++ b/src/zvt/resources/missed_concept.json @@ -0,0 +1,62 @@ +[ + "北京冬奥", + "长江三角", + "HS300_", + "股权激励", + 
"标准普尔", + "预盈预增", + "上证50_", + "预亏预减", + "上证180_", + "滨海新区", + "MSCI中国", + "沪股通", + "转债标的", + "昨日涨停_含一字", + "万达概念", + "成渝特区", + "ST股", + "注册制次新股", + "机构重仓", + "IPO受益", + "参股新三板", + "破净股", + "上证380", + "宁组合", + "茅指数", + "深证100R", + "深股通", + "举牌", + "债转股", + "昨日连板_含一字", + "融资融券", + "央视50_", + "昨日触板", + "科创板做市股", + "QFII重仓", + "科创板做市商", + "独角兽", + "AB股", + "基金重仓", + "富士康", + "创业板综", + "中证500", + "次新股", + "富时罗素", + "百元股", + "创业成份", + "送转预期", + "B股", + "杭州亚运会", + "深成500", + "股权转让", + "社保重仓", + "昨日连板", + "微盘股", + "昨日涨停", + "GDR", + "分拆预期", + "高送转", + "湖北自贸", + "AH股" +] \ No newline at end of file diff --git a/src/zvt/resources/missed_industry.json b/src/zvt/resources/missed_industry.json new file mode 100644 index 00000000..0637a088 --- /dev/null +++ b/src/zvt/resources/missed_industry.json @@ -0,0 +1 @@ +[] \ No newline at end of file diff --git a/src/zvt/rest/__init__.py b/src/zvt/rest/__init__.py new file mode 100644 index 00000000..40a96afc --- /dev/null +++ b/src/zvt/rest/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/src/zvt/rest/data.py b/src/zvt/rest/data.py new file mode 100644 index 00000000..9dd2c79c --- /dev/null +++ b/src/zvt/rest/data.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +from fastapi import APIRouter +from fastapi.encoders import jsonable_encoder + +import zvt.contract as contract +import zvt.contract.api as contract_api + +data_router = APIRouter( + prefix="/api/data", + tags=["data"], + responses={404: {"description": "Not found"}}, +) + + +@data_router.get( + "/providers", + response_model=list, +) +def get_data_providers(): + """ + Get data providers + """ + return contract_api.get_providers() + + +@data_router.get( + "/schemas", + response_model=list, +) +def get_data_schemas(provider): + """ + Get schemas by provider + """ + return [schema.__name__ for schema in contract_api.get_schemas(provider=provider)] + + +@data_router.get( + "/query_data", + response_model=list, +) +def query_data(provider: str, schema: str): + """ + 
Get schemas by provider + """ + model: contract.Mixin = contract_api.get_schema_by_name(schema) + with contract_api.DBSession(provider=provider, data_schema=model)() as session: + return jsonable_encoder(model.query_data(session=session, limit=100, return_type="domain")) diff --git a/src/zvt/rest/factor.py b/src/zvt/rest/factor.py new file mode 100644 index 00000000..7c52d345 --- /dev/null +++ b/src/zvt/rest/factor.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +from typing import List + +from fastapi import APIRouter + +from zvt.contract import zvt_context +from zvt.factors import factor_service +from zvt.factors.factor_models import FactorRequestModel, TradingSignalModel + +factor_router = APIRouter( + prefix="/api/factor", + tags=["factor"], + responses={404: {"description": "Not found"}}, +) + + +@factor_router.get("/get_factors", response_model=List[str]) +def get_factors(): + return [name for name in zvt_context.factor_cls_registry.keys()] + + +@factor_router.post("/query_factor_result", response_model=List[TradingSignalModel]) +def query_factor_result(factor_request_model: FactorRequestModel): + return factor_service.query_factor_result(factor_request_model) diff --git a/src/zvt/rest/misc.py b/src/zvt/rest/misc.py new file mode 100644 index 00000000..a0a11120 --- /dev/null +++ b/src/zvt/rest/misc.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +from fastapi import APIRouter + +from zvt.misc import misc_service +from zvt.misc.misc_models import TimeMessage + +misc_router = APIRouter( + prefix="/api/misc", + tags=["misc"], + responses={404: {"description": "Not found"}}, +) + + +@misc_router.get( + "/time_message", + response_model=TimeMessage, +) +def get_time_message(): + """ + Get time message + """ + return misc_service.get_time_message() diff --git a/src/zvt/rest/trading.py b/src/zvt/rest/trading.py new file mode 100644 index 00000000..be51e3d8 --- /dev/null +++ b/src/zvt/rest/trading.py @@ -0,0 +1,113 @@ +import platform +from typing import List, Optional + 
+from fastapi import APIRouter, HTTPException +from fastapi_pagination import Page + +import zvt.contract.api as contract_api +import zvt.trading.trading_service as trading_service +from zvt.common.trading_models import BuyParameter, SellParameter, TradingResult +from zvt.trading.trading_models import ( + BuildTradingPlanModel, + TradingPlanModel, + QueryTradingPlanModel, + QueryStockQuoteModel, + StockQuoteStatsModel, + QueryStockQuoteSettingModel, + BuildQueryStockQuoteSettingModel, + QueryTagQuoteModel, + TagQuoteStatsModel, + KdataModel, + KdataRequestModel, + TSModel, + TSRequestModel, + QuoteStatsModel, +) +from zvt.trading.trading_schemas import QueryStockQuoteSetting + +trading_router = APIRouter( + prefix="/api/trading", + tags=["trading"], + responses={404: {"description": "Not found"}}, +) + + +@trading_router.post("/query_kdata", response_model=Optional[List[KdataModel]]) +def query_kdata(kdata_request_model: KdataRequestModel): + return trading_service.query_kdata(kdata_request_model) + + +@trading_router.post("/query_ts", response_model=Optional[List[TSModel]]) +def query_kdata(ts_request_model: TSRequestModel): + return trading_service.query_ts(ts_request_model) + + +@trading_router.get("/get_quote_stats", response_model=Optional[QuoteStatsModel]) +def get_quote_stats(): + return trading_service.query_quote_stats() + + +@trading_router.get("/get_query_stock_quote_setting", response_model=Optional[QueryStockQuoteSettingModel]) +def get_query_stock_quote_setting(): + with contract_api.DBSession(provider="zvt", data_schema=QueryStockQuoteSetting)() as session: + query_setting: List[QueryStockQuoteSetting] = QueryStockQuoteSetting.query_data( + session=session, return_type="domain" + ) + if query_setting: + return query_setting[0] + return None + + +@trading_router.post("/build_query_stock_quote_setting", response_model=QueryStockQuoteSettingModel) +def build_query_stock_quote_setting(build_query_stock_quote_setting_model: 
BuildQueryStockQuoteSettingModel): + return trading_service.build_query_stock_quote_setting(build_query_stock_quote_setting_model) + + +@trading_router.post("/query_tag_quotes", response_model=List[TagQuoteStatsModel]) +def query_tag_quotes(query_tag_quote_model: QueryTagQuoteModel): + return trading_service.query_tag_quotes(query_tag_quote_model) + + +@trading_router.post("/query_stock_quotes", response_model=Optional[StockQuoteStatsModel]) +def query_stock_quotes(query_stock_quote_model: QueryStockQuoteModel): + return trading_service.query_stock_quotes(query_stock_quote_model) + + +@trading_router.post("/build_trading_plan", response_model=TradingPlanModel) +def build_trading_plan(build_trading_plan_model: BuildTradingPlanModel): + return trading_service.build_trading_plan(build_trading_plan_model) + + +@trading_router.post("/query_trading_plan", response_model=Page[TradingPlanModel]) +def query_trading_plan(query_trading_plan_model: QueryTradingPlanModel): + return trading_service.query_trading_plan(query_trading_plan_model) + + +@trading_router.get("/get_current_trading_plan", response_model=List[TradingPlanModel]) +def get_current_trading_plan(): + return trading_service.get_current_trading_plan() + + +@trading_router.get("/get_future_trading_plan", response_model=List[TradingPlanModel]) +def get_future_trading_plan(): + return trading_service.get_future_trading_plan() + + +@trading_router.post("/buy", response_model=TradingResult) +def buy(buy_position_strategy: BuyParameter): + if platform.system() == "Windows": + from zvt.broker.qmt.context import qmt_context + + return qmt_context.qmt_account.buy(buy_position_strategy) + else: + raise HTTPException(status_code=500, detail="Please use qmt in windows! 
") + + +@trading_router.post("/sell", response_model=TradingResult) +def sell(sell_position_strategy: SellParameter): + if platform.system() == "Windows": + from zvt.broker.qmt.context import qmt_context + + return qmt_context.qmt_account.sell(sell_position_strategy) + else: + raise HTTPException(status_code=500, detail="Please use qmt in windows! ") diff --git a/src/zvt/rest/work.py b/src/zvt/rest/work.py new file mode 100644 index 00000000..d5a51703 --- /dev/null +++ b/src/zvt/rest/work.py @@ -0,0 +1,263 @@ +# -*- coding: utf-8 -*- +from typing import List, Optional + +from fastapi import APIRouter + +import zvt.contract.api as contract_api +import zvt.tag.tag_service as tag_service +from zvt.domain import Stock +from zvt.tag.common import TagType +from zvt.tag.tag_models import ( + TagInfoModel, + CreateTagInfoModel, + StockTagsModel, + SimpleStockTagsModel, + SetStockTagsModel, + CreateStockPoolInfoModel, + StockPoolInfoModel, + CreateStockPoolsModel, + StockPoolsModel, + QueryStockTagStatsModel, + StockTagStatsModel, + QueryStockTagsModel, + QuerySimpleStockTagsModel, + ActivateSubTagsResultModel, + ActivateSubTagsModel, + BatchSetStockTagsModel, + StockTagOptions, + MainTagIndustryRelation, + MainTagSubTagRelation, + IndustryInfoModel, + ChangeMainTagModel, +) +from zvt.tag.tag_schemas import ( + StockTags, + MainTagInfo, + SubTagInfo, + HiddenTagInfo, + StockPoolInfo, + StockPools, + IndustryInfo, +) +from zvt.utils.time_utils import current_date + +work_router = APIRouter( + prefix="/api/work", + tags=["work"], + responses={404: {"description": "Not found"}}, +) + + +@work_router.post("/create_stock_pool_info", response_model=StockPoolInfoModel) +def create_stock_pool_info(create_stock_pool_info_model: CreateStockPoolInfoModel): + return tag_service.build_stock_pool_info(create_stock_pool_info_model, timestamp=current_date()) + + +@work_router.get("/get_stock_pool_info", response_model=List[StockPoolInfoModel]) +def get_stock_pool_info(): + with 
contract_api.DBSession(provider="zvt", data_schema=StockPoolInfo)() as session: + stock_pool_info: List[StockPoolInfo] = StockPoolInfo.query_data(session=session, return_type="domain") + return stock_pool_info + + +@work_router.post("/create_stock_pools", response_model=StockPoolsModel) +def create_stock_pools(create_stock_pools_model: CreateStockPoolsModel): + return tag_service.build_stock_pool(create_stock_pools_model, current_date()) + + +@work_router.get("/get_stock_pools", response_model=Optional[StockPoolsModel]) +def get_stock_pools(stock_pool_name: str): + with contract_api.DBSession(provider="zvt", data_schema=StockPools)() as session: + stock_pools: List[StockPools] = StockPools.query_data( + session=session, + filters=[StockPools.stock_pool_name == stock_pool_name], + order=StockPools.timestamp.desc(), + limit=1, + return_type="domain", + ) + if stock_pools: + return stock_pools[0] + return None + + +@work_router.get("/get_main_tag_info", response_model=List[TagInfoModel]) +def get_main_tag_info(): + """ + Get main_tag info + """ + with contract_api.DBSession(provider="zvt", data_schema=MainTagInfo)() as session: + tags_info: List[MainTagInfo] = MainTagInfo.query_data(session=session, return_type="domain") + return tags_info + + +@work_router.get("/get_sub_tag_info", response_model=List[TagInfoModel]) +def get_sub_tag_info(): + """ + Get sub_tag info + """ + with contract_api.DBSession(provider="zvt", data_schema=SubTagInfo)() as session: + tags_info: List[SubTagInfo] = SubTagInfo.query_data(session=session, return_type="domain") + return tags_info + + +@work_router.get("/get_main_tag_sub_tag_relation", response_model=MainTagSubTagRelation) +def get_main_tag_sub_tag_relation(main_tag): + return tag_service.get_main_tag_sub_tag_relation(main_tag=main_tag) + + +@work_router.get("/get_industry_info", response_model=List[IndustryInfoModel]) +def get_industry_info(): + """ + Get industry info + """ + with contract_api.DBSession(provider="zvt", 
data_schema=IndustryInfo)() as session: + industry_info: List[IndustryInfo] = IndustryInfo.query_data(session=session, return_type="domain") + return industry_info + + +@work_router.get("/get_main_tag_industry_relation", response_model=MainTagIndustryRelation) +def get_main_tag_industry_relation(main_tag): + return tag_service.get_main_tag_industry_relation(main_tag=main_tag) + + +@work_router.get("/get_hidden_tag_info", response_model=List[TagInfoModel]) +def get_hidden_tag_info(): + """ + Get hidden_tag info + """ + with contract_api.DBSession(provider="zvt", data_schema=HiddenTagInfo)() as session: + tags_info: List[HiddenTagInfo] = HiddenTagInfo.query_data(session=session, return_type="domain") + return tags_info + + +@work_router.post("/create_main_tag_info", response_model=TagInfoModel) +def create_main_tag_info(tag_info: CreateTagInfoModel): + return tag_service.build_tag_info(tag_info, tag_type=TagType.main_tag) + + +@work_router.post("/create_sub_tag_info", response_model=TagInfoModel) +def create_sub_tag_info(tag_info: CreateTagInfoModel): + return tag_service.build_tag_info(tag_info, TagType.sub_tag) + + +@work_router.post("/create_hidden_tag_info", response_model=TagInfoModel) +def create_hidden_tag_info(tag_info: CreateTagInfoModel): + return tag_service.build_tag_info(tag_info, TagType.hidden_tag) + + +@work_router.post("/query_stock_tags", response_model=List[StockTagsModel]) +def query_stock_tags(query_stock_tags_model: QueryStockTagsModel): + """ + Get entity tags + """ + filters = [StockTags.entity_id.in_(query_stock_tags_model.entity_ids)] + + with contract_api.DBSession(provider="zvt", data_schema=StockTags)() as session: + tags: List[StockTags] = StockTags.query_data( + session=session, filters=filters, return_type="domain", order=StockTags.timestamp.desc() + ) + tags_dict = {tag.entity_id: tag for tag in tags} + sorted_tags = [tags_dict[entity_id] for entity_id in query_stock_tags_model.entity_ids] + return sorted_tags + + 
+@work_router.post("/query_simple_stock_tags", response_model=List[SimpleStockTagsModel]) +def query_simple_stock_tags(query_simple_stock_tags_model: QuerySimpleStockTagsModel): + """ + Get simple entity tags + """ + + entity_ids = query_simple_stock_tags_model.entity_ids + + filters = [StockTags.entity_id.in_(entity_ids)] + with contract_api.DBSession(provider="zvt", data_schema=StockTags)() as session: + tags: List[dict] = StockTags.query_data( + session=session, filters=filters, return_type="dict", order=StockTags.timestamp.desc() + ) + entity_tag_map = {item["entity_id"]: item for item in tags} + result_tags = [] + stocks = Stock.query_data(provider="em", entity_ids=[tag["entity_id"] for tag in tags], return_type="domain") + stocks_map = {item.entity_id: item for item in stocks} + for entity_id in entity_ids: + tag = entity_tag_map.get(entity_id) + tag["name"] = stocks_map.get(entity_id).name + if stocks_map.get(entity_id).controlling_holder_parent: + tag["controlling_holder_parent"] = stocks_map.get(entity_id).controlling_holder_parent + else: + tag["controlling_holder_parent"] = stocks_map.get(entity_id).controlling_holder + tag["top_ten_ratio"] = stocks_map.get(entity_id).top_ten_ratio + result_tags.append(tag) + return result_tags + + +@work_router.get("/get_stock_tag_options", response_model=StockTagOptions) +def get_stock_tag_options(entity_id: str): + """ + Get stock tag options + """ + return tag_service.get_stock_tag_options(entity_id=entity_id) + + +@work_router.post("/set_stock_tags", response_model=StockTagsModel) +def set_stock_tags(set_stock_tags_model: SetStockTagsModel): + """ + Set stock tags + """ + return tag_service.build_stock_tags( + set_stock_tags_model=set_stock_tags_model, timestamp=current_date(), set_by_user=True + ) + + +@work_router.post("/build_stock_tags", response_model=List[StockTagsModel]) +def build_stock_tags(set_stock_tags_model_list: List[SetStockTagsModel]): + """ + Set stock tags in batch + """ + return [ + 
tag_service.build_stock_tags( + set_stock_tags_model=set_stock_tags_model, timestamp=current_date(), set_by_user=True + ) + for set_stock_tags_model in set_stock_tags_model_list + ] + + +@work_router.post("/query_stock_tag_stats", response_model=List[StockTagStatsModel]) +def query_stock_tag_stats(query_stock_tag_stats_model: QueryStockTagStatsModel): + """ + Get stock tag stats + """ + + return tag_service.query_stock_tag_stats(query_stock_tag_stats_model=query_stock_tag_stats_model) + + +@work_router.post("/activate_sub_tags", response_model=ActivateSubTagsResultModel) +def activate_sub_tags(activate_sub_tags_model: ActivateSubTagsModel): + """ + Activate sub tags + """ + + return tag_service.activate_sub_tags(activate_sub_tags_model=activate_sub_tags_model) + + +@work_router.post("/batch_set_stock_tags", response_model=List[StockTagsModel]) +def batch_set_stock_tags(batch_set_stock_tags_model: BatchSetStockTagsModel): + return tag_service.batch_set_stock_tags(batch_set_stock_tags_model=batch_set_stock_tags_model) + + +@work_router.post("/build_main_tag_industry_relation", response_model=str) +def build_main_tag_industry_relation(relation: MainTagIndustryRelation): + tag_service.build_main_tag_industry_relation(main_tag_industry_relation=relation) + tag_service.activate_industry_list(industry_list=relation.industry_list) + return "success" + + +@work_router.post("/build_main_tag_sub_tag_relation", response_model=str) +def build_main_tag_sub_tag_relation(relation: MainTagSubTagRelation): + tag_service.build_main_tag_sub_tag_relation(main_tag_sub_tag_relation=relation) + # tag_service.activate_sub_tags(activate_sub_tags_model=ActivateSubTagsModel(sub_tags=relation.sub_tag_list)) + return "success" + + +@work_router.post("/change_main_tag", response_model=List[StockTagsModel]) +def change_main_tag(change_main_tag_model: ChangeMainTagModel): + return tag_service.change_main_tag(change_main_tag_model=change_main_tag_model) diff --git a/src/zvt/samples/__init__.py 
b/src/zvt/samples/__init__.py new file mode 100644 index 00000000..88dfe563 --- /dev/null +++ b/src/zvt/samples/__init__.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- +from .stock_traders import * diff --git a/zvt/samples/data.zip b/src/zvt/samples/data.zip similarity index 100% rename from zvt/samples/data.zip rename to src/zvt/samples/data.zip diff --git a/src/zvt/samples/stock_traders.py b/src/zvt/samples/stock_traders.py new file mode 100644 index 00000000..def97675 --- /dev/null +++ b/src/zvt/samples/stock_traders.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +from zvt.contract import IntervalLevel +from zvt.factors.ma.ma_factor import CrossMaFactor +from zvt.factors.macd.macd_factor import BullFactor +from zvt.trader.trader import StockTrader + + +class MyMaTrader(StockTrader): + def init_factors( + self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, adjust_type=None + ): + return [ + CrossMaFactor( + entity_ids=entity_ids, + entity_schema=entity_schema, + exchanges=exchanges, + codes=codes, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + windows=[5, 10], + need_persist=False, + adjust_type=adjust_type, + ) + ] + + +class MyBullTrader(StockTrader): + def init_factors( + self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, adjust_type=None + ): + return [ + BullFactor( + entity_ids=entity_ids, + entity_schema=entity_schema, + exchanges=exchanges, + codes=codes, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + adjust_type=adjust_type, + ) + ] + + +if __name__ == "__main__": + # single stock with cross ma factor + MyMaTrader( + codes=["000338"], + level=IntervalLevel.LEVEL_1DAY, + start_timestamp="2018-01-01", + end_timestamp="2019-06-30", + trader_name="000338_ma_trader", + ).run() + + # single stock with bull factor + # MyBullTrader(codes=['000338'], level=IntervalLevel.LEVEL_1DAY, start_timestamp='2018-01-01', + # end_timestamp='2019-06-30', 
trader_name='000338_bull_trader').run() + + # multiple stocks with cross ma factor + # MyMaTrader(codes=SAMPLE_STOCK_CODES, level=IntervalLevel.LEVEL_1DAY, start_timestamp='2018-01-01', + # end_timestamp='2019-06-30', trader_name='sample_stocks_ma_trader').run() + + # multiple stocks with bull factor + # MyBullTrader(codes=SAMPLE_STOCK_CODES, level=IntervalLevel.LEVEL_1DAY, start_timestamp='2018-01-01', + # end_timestamp='2019-06-30', trader_name='sample_stocks_bull_trader').run() diff --git a/src/zvt/sched/__init__.py b/src/zvt/sched/__init__.py new file mode 100644 index 00000000..40a96afc --- /dev/null +++ b/src/zvt/sched/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/src/zvt/sched/sched.py b/src/zvt/sched/sched.py new file mode 100644 index 00000000..5118ed1d --- /dev/null +++ b/src/zvt/sched/sched.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +import logging +import os + +from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor +from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore +from apscheduler.schedulers.background import BackgroundScheduler + +from zvt import ZVT_HOME + +logger = logging.getLogger(__name__) + +jobs_db_path = os.path.join(ZVT_HOME, "jobs.db") + + +jobstores = {"default": SQLAlchemyJobStore(url=f"sqlite:///{jobs_db_path}")} + +executors = {"default": ThreadPoolExecutor(20), "processpool": ProcessPoolExecutor(5)} +job_defaults = {"coalesce": False, "max_instances": 1} + +zvt_scheduler = BackgroundScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults) + + +def sched_tasks(): + import platform + + if platform.system() == "Windows": + try: + from zvt.broker.qmt.qmt_quote import record_tick + + zvt_scheduler.add_job(func=record_tick, trigger="cron", hour=9, minute=19, day_of_week="mon-fri") + except Exception as e: + logger.error("QMT not work: %s", e) + else: + logger.warning("QMT need run in Windows!") + + zvt_scheduler.start() + + +if __name__ == "__main__": + sched_tasks() 
diff --git a/src/zvt/tag/__init__.py b/src/zvt/tag/__init__.py new file mode 100644 index 00000000..efbe4554 --- /dev/null +++ b/src/zvt/tag/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*-# + + +# the __all__ is generated +__all__ = [] diff --git a/src/zvt/tag/common.py b/src/zvt/tag/common.py new file mode 100644 index 00000000..ac4cca7f --- /dev/null +++ b/src/zvt/tag/common.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +from enum import Enum + + +class StockPoolType(Enum): + system = "system" + custom = "custom" + dynamic = "dynamic" + + +class DynamicPoolType(Enum): + limit_up = "limit_up" + limit_down = "limit_down" + + +class InsertMode(Enum): + overwrite = "overwrite" + append = "append" + + +class TagType(Enum): + #: A tag is a main tag due to its extensive capacity. + main_tag = "main_tag" + sub_tag = "sub_tag" + hidden_tag = "hidden_tag" + + +class TagStatsQueryType(Enum): + simple = "simple" + details = "details" + + +# the __all__ is generated +__all__ = ["StockPoolType", "DynamicPoolType", "InsertMode", "TagType", "TagStatsQueryType"] diff --git a/src/zvt/tag/tag_models.py b/src/zvt/tag/tag_models.py new file mode 100644 index 00000000..6631bf4e --- /dev/null +++ b/src/zvt/tag/tag_models.py @@ -0,0 +1,284 @@ +# -*- coding: utf-8 -*- +from typing import Dict, Union, List, Optional + +from pydantic import field_validator, Field +from pydantic_core.core_schema import ValidationInfo + +from zvt.contract.model import MixinModel, CustomModel +from zvt.tag.common import StockPoolType, TagType, TagStatsQueryType, InsertMode +from zvt.tag.tag_utils import get_stock_pool_names + + +class TagInfoModel(MixinModel): + tag: str + tag_reason: Optional[str] = Field(default=None) + main_tag: Optional[str] = Field(default=None) + + +class CreateTagInfoModel(CustomModel): + tag: str + tag_reason: Optional[str] = Field(default=None) + + +class IndustryInfoModel(MixinModel): + industry_name: str + description: str + # related main tag + main_tag: str + + +class 
MainTagIndustryRelation(CustomModel): + main_tag: str + industry_list: List[str] + + +class MainTagSubTagRelation(CustomModel): + main_tag: str + sub_tag_list: List[str] + + +class ChangeMainTagModel(CustomModel): + current_main_tag: str + new_main_tag: str + + +class StockTagsModel(MixinModel): + main_tag: Optional[str] = Field(default=None) + main_tag_reason: Optional[str] = Field(default=None) + main_tags: Dict[str, str] + + sub_tag: Optional[str] = Field(default=None) + sub_tag_reason: Optional[str] = Field(default=None) + sub_tags: Union[Dict[str, str], None] + + active_hidden_tags: Union[Dict[str, str], None] + hidden_tags: Union[Dict[str, str], None] + set_by_user: bool = False + + +class SimpleStockTagsModel(CustomModel): + entity_id: str + name: str + main_tag: Optional[str] = Field(default=None) + main_tag_reason: Optional[str] = Field(default=None) + main_tags: Dict[str, str] + sub_tag: Union[str, None] + sub_tag_reason: Optional[str] = Field(default=None) + sub_tags: Union[Dict[str, str], None] + active_hidden_tags: Union[Dict[str, str], None] + controlling_holder_parent: Optional[str] = Field(default=None) + top_ten_ratio: Optional[float] = Field(default=None) + + +class QueryStockTagsModel(CustomModel): + entity_ids: List[str] + + +class QuerySimpleStockTagsModel(CustomModel): + entity_ids: List[str] + + +class BatchSetStockTagsModel(CustomModel): + entity_ids: List[str] + tag: str + tag_reason: Optional[str] = Field(default=None) + tag_type: TagType + + +class TagParameter(CustomModel): + main_tag: str + main_tag_reason: Optional[str] = Field(default=None) + sub_tag: Optional[str] = Field(default=None) + sub_tag_reason: Optional[str] = Field(default=None) + + +class StockTagOptions(CustomModel): + main_tag: Optional[str] = Field(default=None) + sub_tag: Optional[str] = Field(default=None) + # hidden_tags: Optional[List[str]] = Field(default=None) + active_hidden_tags: Optional[Dict[str, str]] = Field(default=None) + main_tag_options: 
List[CreateTagInfoModel] + sub_tag_options: List[CreateTagInfoModel] + hidden_tag_options: List[CreateTagInfoModel] + + +class SetStockTagsModel(CustomModel): + entity_id: str + main_tag: str + main_tag_reason: Optional[str] = Field(default=None) + sub_tag: Optional[str] = Field(default=None) + sub_tag_reason: Optional[str] = Field(default=None) + active_hidden_tags: Optional[Dict[str, str]] = Field(default=None) + + # @field_validator("main_tag") + # @classmethod + # def main_tag_must_be_in(cls, v: str) -> str: + # if v not in get_main_tags(): + # raise ValueError(f"main_tag: {v} must be created at main_tag_info at first") + # return v + # + # @field_validator("sub_tag") + # @classmethod + # def sub_tag_must_be_in(cls, v: str) -> str: + # if v and (v not in get_sub_tags()): + # raise ValueError(f"sub_tag: {v} must be created at sub_tag_info at first") + # return v + # + # @field_validator("active_hidden_tags") + # @classmethod + # def hidden_tag_must_be_in(cls, v: Union[Dict[str, str], None]) -> Union[Dict[str, str], None]: + # if v: + # for item in v.keys(): + # if item not in get_hidden_tags(): + # raise ValueError(f"hidden_tag: {v} must be created at hidden_tag_info at first") + # return v + + +class StockPoolModel(MixinModel): + stock_pool_name: str + entity_ids: List[str] + + +class StockPoolInfoModel(MixinModel): + stock_pool_type: StockPoolType + stock_pool_name: str + + +class CreateStockPoolInfoModel(CustomModel): + stock_pool_type: StockPoolType + stock_pool_name: str + + @field_validator("stock_pool_name") + @classmethod + def stock_pool_name_existed(cls, v: str) -> str: + if v in get_stock_pool_names(): + raise ValueError(f"stock_pool_name: {v} has been used") + return v + + +class StockPoolsModel(MixinModel): + stock_pool_name: str + entity_ids: List[str] + + +class CreateStockPoolsModel(CustomModel): + stock_pool_name: str + entity_ids: List[str] + insert_mode: InsertMode = Field(default=InsertMode.overwrite) + + # @field_validator("stock_pool_name") 
+ # @classmethod + # def stock_pool_name_must_be_in(cls, v: str) -> str: + # if v: + # if v not in get_stock_pool_names(): + # raise ValueError(f"stock_pool_name: {v} must be created at stock_pool_info at first") + # return v + + +class QueryStockTagStatsModel(CustomModel): + stock_pool_name: Optional[str] = Field(default=None) + entity_ids: Optional[List[str]] = Field(default=None) + query_type: Optional[TagStatsQueryType] = Field(default=TagStatsQueryType.details) + + @field_validator("stock_pool_name", "entity_ids") + @classmethod + def stock_pool_name_or_entity_ids_must_set_only_one(cls, v, validation_info: ValidationInfo, **kwargs): + if validation_info.field_name == "stock_pool_name": + other_field = "entity_ids" + else: + other_field = "stock_pool_name" + + other_value = validation_info.data.get(other_field) + + if v and other_value: + raise ValueError(f"Only one of 'stock_pool_name' or 'entity_ids' should be set.") + elif not v and not other_value: + raise ValueError("Either 'stock_pool_name' or 'entity_ids' must be set.") + + return v + + @field_validator("stock_pool_name") + @classmethod + def stock_pool_name_must_be_in(cls, v: str) -> str: + if v: + if v not in get_stock_pool_names(): + raise ValueError(f"stock_pool_name: {v} not existed") + return v + + +class StockTagDetailsModel(CustomModel): + entity_id: str + main_tag: Optional[str] = Field(default=None) + sub_tag: Optional[str] = Field(default=None) + hidden_tags: Union[List[str], None] + + #: 代码 + code: str + #: 名字 + name: str + #: 减持 + recent_reduction: Optional[bool] = Field(default=None) + #: 增持 + recent_acquisition: Optional[bool] = Field(default=None) + #: 解禁 + recent_unlock: Optional[bool] = Field(default=None) + #: 增发配股 + recent_additional_or_rights_issue: Optional[bool] = Field(default=None) + #: 业绩利好 + recent_positive_earnings_news: Optional[bool] = Field(default=None) + #: 业绩利空 + recent_negative_earnings_news: Optional[bool] = Field(default=None) + #: 上榜次数 + recent_dragon_and_tiger_count: Optional[int] = 
Field(default=None) + #: 违规行为 + recent_violation_alert: Optional[bool] = Field(default=None) + #: 利好 + recent_positive_news: Optional[bool] = Field(default=None) + #: 利空 + recent_negative_news: Optional[bool] = Field(default=None) + #: 新闻总结 + recent_news_summary: Optional[Dict[str, str]] = Field(default=None) + + +class StockTagStatsModel(MixinModel): + main_tag: str + turnover: Optional[float] = Field(default=None) + entity_count: Optional[int] = Field(default=None) + position: Optional[int] = Field(default=None) + is_main_line: Optional[bool] = Field(default=None) + main_line_continuous_days: Optional[int] = Field(default=None) + entity_ids: Optional[List[str]] = Field(default=None) + stock_details: Optional[List[StockTagDetailsModel]] = Field(default=None) + + +class ActivateSubTagsModel(CustomModel): + sub_tags: List[str] + + +class ActivateSubTagsResultModel(CustomModel): + tag_entity_ids: Dict[str, Union[List[str], None]] + + +# the __all__ is generated +__all__ = [ + "TagInfoModel", + "CreateTagInfoModel", + "StockTagsModel", + "SimpleStockTagsModel", + "QueryStockTagsModel", + "QuerySimpleStockTagsModel", + "BatchSetStockTagsModel", + "TagParameter", + "StockTagOptions", + "SetStockTagsModel", + "StockPoolModel", + "StockPoolInfoModel", + "CreateStockPoolInfoModel", + "StockPoolsModel", + "CreateStockPoolsModel", + "QueryStockTagStatsModel", + "StockTagDetailsModel", + "StockTagStatsModel", + "ActivateSubTagsModel", + "ActivateSubTagsResultModel", +] diff --git a/src/zvt/tag/tag_schemas.py b/src/zvt/tag/tag_schemas.py new file mode 100644 index 00000000..ccd3bd86 --- /dev/null +++ b/src/zvt/tag/tag_schemas.py @@ -0,0 +1,137 @@ +# -*- coding: utf-8 -*- + +from sqlalchemy import Column, String, JSON, Boolean, Float, Integer +from sqlalchemy.orm import declarative_base + +from zvt.contract import Mixin +from zvt.contract.register import register_schema + +StockTagsBase = declarative_base() + + +class IndustryInfo(StockTagsBase, Mixin): + __tablename__ = 
"industry_info" + + industry_name = Column(String, unique=True) + description = Column(String) + # related main tag + main_tag = Column(String) + + +class MainTagInfo(StockTagsBase, Mixin): + __tablename__ = "main_tag_info" + + tag = Column(String, unique=True) + tag_reason = Column(String) + + +class SubTagInfo(StockTagsBase, Mixin): + __tablename__ = "sub_tag_info" + + tag = Column(String, unique=True) + tag_reason = Column(String) + + # related main tag + main_tag = Column(String) + + +class HiddenTagInfo(StockTagsBase, Mixin): + __tablename__ = "hidden_tag_info" + + tag = Column(String, unique=True) + tag_reason = Column(String) + + +class StockTags(StockTagsBase, Mixin): + """ + Schema for storing stock tags + """ + + __tablename__ = "stock_tags" + + code = Column(String(length=64)) + name = Column(String(length=128)) + + main_tag = Column(String) + main_tag_reason = Column(String) + main_tags = Column(JSON) + + sub_tag = Column(String) + sub_tag_reason = Column(String) + sub_tags = Column(JSON) + + active_hidden_tags = Column(JSON) + hidden_tags = Column(JSON) + set_by_user = Column(Boolean, default=False) + + +class StockSystemTags(StockTagsBase, Mixin): + __tablename__ = "stock_system_tags" + #: 编码 + code = Column(String(length=64)) + #: 名字 + name = Column(String(length=128)) + #: 减持 + recent_reduction = Column(Boolean) + #: 增持 + recent_acquisition = Column(Boolean) + #: 解禁 + recent_unlock = Column(Boolean) + #: 增发配股 + recent_additional_or_rights_issue = Column(Boolean) + #: 业绩利好 + recent_positive_earnings_news = Column(Boolean) + #: 业绩利空 + recent_negative_earnings_news = Column(Boolean) + #: 上榜次数 + recent_dragon_and_tiger_count = Column(Integer) + #: 违规行为 + recent_violation_alert = Column(Boolean) + #: 利好 + recent_positive_news = Column(Boolean) + #: 利空 + recent_negative_news = Column(Boolean) + #: 新闻总结 + recent_news_summary = Column(JSON) + + +class StockPoolInfo(StockTagsBase, Mixin): + __tablename__ = "stock_pool_info" + stock_pool_type = Column(String) 
+ stock_pool_name = Column(String, unique=True) + + +class StockPools(StockTagsBase, Mixin): + __tablename__ = "stock_pools" + stock_pool_name = Column(String) + entity_ids = Column(JSON) + + +class TagStats(StockTagsBase, Mixin): + __tablename__ = "tag_stats" + + stock_pool_name = Column(String) + main_tag = Column(String) + turnover = Column(Float) + entity_count = Column(Integer) + position = Column(Integer) + is_main_line = Column(Boolean) + main_line_continuous_days = Column(Integer) + entity_ids = Column(JSON) + + +register_schema(providers=["zvt"], db_name="stock_tags", schema_base=StockTagsBase) + + +# the __all__ is generated +__all__ = [ + "IndustryInfo", + "MainTagInfo", + "SubTagInfo", + "HiddenTagInfo", + "StockTags", + "StockSystemTags", + "StockPoolInfo", + "StockPools", + "TagStats", +] diff --git a/src/zvt/tag/tag_service.py b/src/zvt/tag/tag_service.py new file mode 100644 index 00000000..bc1e29e6 --- /dev/null +++ b/src/zvt/tag/tag_service.py @@ -0,0 +1,845 @@ +# -*- coding: utf-8 -*- +import logging +from typing import List + +import pandas as pd +from fastapi import HTTPException +from sqlalchemy import func + +import zvt.contract.api as contract_api +from zvt.api.selector import get_entity_ids_by_filter +from zvt.domain import BlockStock, Block, Stock +from zvt.tag.common import TagType, TagStatsQueryType, StockPoolType, InsertMode +from zvt.tag.tag_models import ( + SetStockTagsModel, + CreateStockPoolInfoModel, + CreateStockPoolsModel, + QueryStockTagStatsModel, + ActivateSubTagsModel, + BatchSetStockTagsModel, + TagParameter, + CreateTagInfoModel, + StockTagOptions, + MainTagIndustryRelation, + MainTagSubTagRelation, + ChangeMainTagModel, +) +from zvt.tag.tag_schemas import ( + StockTags, + StockPools, + StockPoolInfo, + TagStats, + StockSystemTags, + MainTagInfo, + SubTagInfo, + HiddenTagInfo, + IndustryInfo, +) +from zvt.tag.tag_utils import ( + get_sub_tags, + get_stock_pool_names, + get_main_tag_by_sub_tag, + get_main_tag_by_industry, 
+) +from zvt.utils.time_utils import to_pd_timestamp, to_time_str, current_date, now_pd_timestamp +from zvt.utils.utils import fill_dict, compare_dicts, flatten_list + +logger = logging.getLogger(__name__) + + +def stock_tags_need_update(stock_tags: StockTags, set_stock_tags_model: SetStockTagsModel): + if ( + stock_tags.main_tag != set_stock_tags_model.main_tag + or stock_tags.main_tag_reason != set_stock_tags_model.main_tag_reason + or stock_tags.sub_tag != set_stock_tags_model.sub_tag + or stock_tags.sub_tag_reason != set_stock_tags_model.sub_tag_reason + or not compare_dicts(stock_tags.active_hidden_tags, set_stock_tags_model.active_hidden_tags) + ): + return True + return False + + +def get_stock_tag_options(entity_id): + with contract_api.DBSession(provider="zvt", data_schema=StockTags)() as session: + datas: List[StockTags] = StockTags.query_data( + entity_id=entity_id, order=StockTags.timestamp.desc(), limit=1, return_type="domain", session=session + ) + main_tag_options = [] + sub_tag_options = [] + hidden_tag_options = [] + + main_tag = None + sub_tag = None + active_hidden_tags = None + stock_tags = None + if datas: + stock_tags = datas[0] + main_tag = stock_tags.main_tag + sub_tag = stock_tags.sub_tag + + if stock_tags.main_tags: + main_tag_options = [ + CreateTagInfoModel(tag=tag, tag_reason=tag_reason) + for tag, tag_reason in stock_tags.main_tags.items() + ] + + if stock_tags.sub_tags: + sub_tag_options = [ + CreateTagInfoModel(tag=tag, tag_reason=tag_reason) + for tag, tag_reason in stock_tags.sub_tags.items() + ] + + if stock_tags.active_hidden_tags: + active_hidden_tags = stock_tags.active_hidden_tags + + if stock_tags.hidden_tags: + hidden_tag_options = [ + CreateTagInfoModel(tag=tag, tag_reason=tag_reason) + for tag, tag_reason in stock_tags.hidden_tags.items() + ] + + main_tags_info: List[MainTagInfo] = MainTagInfo.query_data(session=session, return_type="domain") + if not main_tag: + main_tag = main_tags_info[0].tag + + main_tag_options = 
def build_stock_tags(
    set_stock_tags_model: SetStockTagsModel, timestamp: pd.Timestamp, set_by_user: bool, keep_current=False
):
    """Create or update the single StockTags row for one stock.

    Ensures the referenced main/sub/hidden tag-info records exist, then
    upserts the per-stock tags row (id ``{entity_id}_tags``).

    :param set_stock_tags_model: requested tags for one entity
    :param timestamp: timestamp used when a new row must be created
    :param set_by_user: recorded on the row; marks a manual change
    :param keep_current: if True, keep the current main/sub/hidden fields and
        only accumulate into the historical ``*_tags`` dicts
    :return: the persisted StockTags row
    """
    logger.info(set_stock_tags_model)

    # register the main tag info record if it is unknown
    main_tag_info = CreateTagInfoModel(
        tag=set_stock_tags_model.main_tag, tag_reason=set_stock_tags_model.main_tag_reason
    )
    if not is_tag_info_existed(tag_info=main_tag_info, tag_type=TagType.main_tag):
        build_tag_info(tag_info=main_tag_info, tag_type=TagType.main_tag)

    # same for the sub tag, which is optional
    if set_stock_tags_model.sub_tag:
        sub_tag_info = CreateTagInfoModel(
            tag=set_stock_tags_model.sub_tag, tag_reason=set_stock_tags_model.sub_tag_reason
        )
        if not is_tag_info_existed(tag_info=sub_tag_info, tag_type=TagType.sub_tag):
            build_tag_info(tag_info=sub_tag_info, tag_type=TagType.sub_tag)

    # and for each active hidden tag (tag -> reason mapping)
    if set_stock_tags_model.active_hidden_tags:
        for tag in set_stock_tags_model.active_hidden_tags:
            hidden_tag_info = CreateTagInfoModel(tag=tag, tag_reason=set_stock_tags_model.active_hidden_tags.get(tag))
            if not is_tag_info_existed(tag_info=hidden_tag_info, tag_type=TagType.hidden_tag):
                build_tag_info(tag_info=hidden_tag_info, tag_type=TagType.hidden_tag)

    with contract_api.DBSession(provider="zvt", data_schema=StockTags)() as session:
        entity_id = set_stock_tags_model.entity_id
        # accumulated history of every tag ever applied (tag -> reason)
        main_tags = {}
        sub_tags = {}
        hidden_tags = {}
        datas = StockTags.query_data(
            session=session,
            entity_id=entity_id,
            limit=1,
            return_type="domain",
        )

        if datas:
            # there is at most one StockTags row per entity
            assert len(datas) == 1
            current_stock_tags: StockTags = datas[0]

            # nothing change
            if not stock_tags_need_update(current_stock_tags, set_stock_tags_model):
                logger.info(f"Not change stock_tags for {set_stock_tags_model.entity_id}")
                return current_stock_tags

            # copy so we mutate fresh dicts, not the ORM-tracked originals
            if current_stock_tags.main_tags:
                main_tags = dict(current_stock_tags.main_tags)
            if current_stock_tags.sub_tags:
                sub_tags = dict(current_stock_tags.sub_tags)
            if current_stock_tags.hidden_tags:
                hidden_tags = dict(current_stock_tags.hidden_tags)

        else:
            # first time this stock gets tagged: create the row
            current_stock_tags = StockTags(
                id=f"{entity_id}_tags",
                entity_id=entity_id,
                timestamp=timestamp,
            )

        # update tag
        # NOTE(review): with keep_current=True the current main/sub/hidden
        # fields are preserved, but the history dicts below are still updated.
        if not keep_current:
            current_stock_tags.main_tag = set_stock_tags_model.main_tag
            current_stock_tags.main_tag_reason = set_stock_tags_model.main_tag_reason

            if set_stock_tags_model.sub_tag:
                current_stock_tags.sub_tag = set_stock_tags_model.sub_tag
            if set_stock_tags_model.sub_tag_reason:
                current_stock_tags.sub_tag_reason = set_stock_tags_model.sub_tag_reason
            # could update to None
            current_stock_tags.active_hidden_tags = set_stock_tags_model.active_hidden_tags
        # update tags
        main_tags[set_stock_tags_model.main_tag] = set_stock_tags_model.main_tag_reason
        if set_stock_tags_model.sub_tag:
            sub_tags[set_stock_tags_model.sub_tag] = set_stock_tags_model.sub_tag_reason
        if set_stock_tags_model.active_hidden_tags:
            for k, v in set_stock_tags_model.active_hidden_tags.items():
                hidden_tags[k] = v
        current_stock_tags.main_tags = main_tags
        current_stock_tags.sub_tags = sub_tags
        current_stock_tags.hidden_tags = hidden_tags

        current_stock_tags.set_by_user = set_by_user

        session.add(current_stock_tags)
        session.commit()
        session.refresh(current_stock_tags)
        return current_stock_tags
def build_tag_parameter(tag_type: TagType, tag, tag_reason, stock_tag: StockTags):
    """Combine a new main or sub tag with a stock's current tags.

    When setting a main tag, the stock's current sub tag (and reason) is kept,
    and vice versa.  If the stock already recorded a reason for ``tag`` in its
    history dict, that stored reason wins over ``tag_reason``.

    :param tag_type: TagType.main_tag or TagType.sub_tag
    :param tag: the tag being applied
    :param tag_reason: fallback reason when the tag was never applied before
    :param stock_tag: the stock's current StockTags row
    :return: a TagParameter with the merged main/sub tags and reasons
    :raises ValueError: if ``tag_type`` is neither main_tag nor sub_tag
    """
    if tag_type == TagType.main_tag:
        main_tag = tag
        # dict.get covers both branches of the original membership check:
        # stored reason if present, otherwise the supplied tag_reason
        main_tag_reason = stock_tag.main_tags.get(main_tag, tag_reason)
        sub_tag = stock_tag.sub_tag
        sub_tag_reason = stock_tag.sub_tag_reason
    elif tag_type == TagType.sub_tag:
        sub_tag = tag
        sub_tag_reason = stock_tag.sub_tags.get(sub_tag, tag_reason)
        main_tag = stock_tag.main_tag
        main_tag_reason = stock_tag.main_tag_reason
    else:
        # was `assert False`: stripped under `python -O`, which would fall
        # through and crash on unbound locals; raise explicitly instead
        raise ValueError(f"tag_type not supported: {tag_type}")

    return TagParameter(
        main_tag=main_tag, main_tag_reason=main_tag_reason, sub_tag=sub_tag, sub_tag_reason=sub_tag_reason
    )
def batch_set_stock_tags(batch_set_stock_tags_model: BatchSetStockTagsModel):
    """Apply one main or sub tag to a batch of stocks.

    Only stocks whose current tag differs from the requested one are touched;
    each is rewritten through :func:`build_stock_tags` with ``set_by_user=True``.

    :return: the list of StockTags rows that were updated (possibly empty)
    :raises ValueError: if the model's tag_type is not main_tag or sub_tag
    """
    if not batch_set_stock_tags_model.entity_ids:
        return []

    # make sure the tag itself is registered
    tag_info = CreateTagInfoModel(tag=batch_set_stock_tags_model.tag, tag_reason=batch_set_stock_tags_model.tag_reason)
    if not is_tag_info_existed(tag_info=tag_info, tag_type=batch_set_stock_tags_model.tag_type):
        build_tag_info(tag_info=tag_info, tag_type=batch_set_stock_tags_model.tag_type)

    with contract_api.DBSession(provider="zvt", data_schema=StockTags)() as session:
        tag_type = batch_set_stock_tags_model.tag_type
        if tag_type == TagType.main_tag:
            tag_filter = StockTags.main_tag != batch_set_stock_tags_model.tag
        elif tag_type == TagType.sub_tag:
            tag_filter = StockTags.sub_tag != batch_set_stock_tags_model.tag
        else:
            # the original left `stock_tags` unbound for other tag types and
            # crashed later with NameError; fail fast with a clear message
            raise ValueError(f"tag_type not supported: {tag_type}")

        # only rows whose current tag differs need rewriting
        stock_tags: List[StockTags] = StockTags.query_data(
            entity_ids=batch_set_stock_tags_model.entity_ids,
            filters=[tag_filter],
            session=session,
            return_type="domain",
        )

        for stock_tag in stock_tags:
            tag_parameter: TagParameter = build_tag_parameter(
                tag_type=tag_type,
                tag=batch_set_stock_tags_model.tag,
                tag_reason=batch_set_stock_tags_model.tag_reason,
                stock_tag=stock_tag,
            )
            set_stock_tags_model = SetStockTagsModel(
                entity_id=stock_tag.entity_id,
                main_tag=tag_parameter.main_tag,
                main_tag_reason=tag_parameter.main_tag_reason,
                sub_tag=tag_parameter.sub_tag,
                sub_tag_reason=tag_parameter.sub_tag_reason,
                active_hidden_tags=stock_tag.active_hidden_tags,
            )

            build_stock_tags(
                set_stock_tags_model=set_stock_tags_model,
                timestamp=now_pd_timestamp(),
                set_by_user=True,
                keep_current=False,
            )
            # build_stock_tags commits in its own session; refresh our view
            session.refresh(stock_tag)
        return stock_tags
def build_default_main_tag(entity_ids=None, force_rebuild=False):
    """
    build default main tag by industry

    :param entity_ids: entity ids
    :param force_rebuild: always rebuild it if True otherwise only build which not existed
    """
    if not entity_ids:
        entity_ids = get_entity_ids_by_filter(
            provider="em", ignore_delist=True, ignore_st=False, ignore_new_stock=False
        )

    # map each stock to its industry block membership
    industry_df = Block.query_data(provider="em", filters=[Block.category == "industry"])
    members: List[BlockStock] = BlockStock.query_data(
        provider="em",
        filters=[BlockStock.code.in_(industry_df["code"].tolist()), BlockStock.stock_id.in_(entity_ids)],
        return_type="domain",
    )
    stock_to_block = {member.stock_id: member for member in members}

    for entity_id in entity_ids:
        existing: List[StockTags] = StockTags.query_data(entity_id=entity_id, return_type="domain")
        if existing and not force_rebuild:
            logger.info(f"{entity_id} main tag has been set.")
            continue

        logger.info(f"build main tag for: {entity_id}")

        block_stock = stock_to_block.get(entity_id)
        if block_stock:
            main_tag = get_main_tag_by_industry(industry_name=block_stock.name)
            main_tag_reason = f"来自行业:{block_stock.name}"
        else:
            # stocks without an industry block fall into the catch-all tag
            main_tag = "其他"
            main_tag_reason = "其他"

        build_stock_tags(
            set_stock_tags_model=SetStockTagsModel(
                entity_id=entity_id,
                main_tag=main_tag,
                main_tag_reason=main_tag_reason,
                sub_tag=None,
                sub_tag_reason=None,
                active_hidden_tags=None,
            ),
            timestamp=now_pd_timestamp(),
            set_by_user=False,
            keep_current=False,
        )
def get_tag_info_schema(tag_type: TagType):
    """Return the tag-info schema class for a tag type.

    :param tag_type: one of TagType.main_tag / sub_tag / hidden_tag
    :return: MainTagInfo, SubTagInfo or HiddenTagInfo
    :raises ValueError: for any other tag type
    """
    schema_by_type = {
        TagType.main_tag: MainTagInfo,
        TagType.sub_tag: SubTagInfo,
        TagType.hidden_tag: HiddenTagInfo,
    }
    try:
        return schema_by_type[tag_type]
    except KeyError:
        # was `assert False`: stripped under -O, which would make the function
        # silently return None; raise an explicit error instead
        raise ValueError(f"tag_type not supported: {tag_type}") from None
def build_stock_pool(create_stock_pools_model: CreateStockPoolsModel, target_date=None):
    """Create or update the stock pool instance for one day.

    Registers the pool name (as a custom pool) if unknown, then either creates
    the day's StockPools row or merges/overwrites its entity ids according to
    the model's insert_mode.

    :param target_date: pool date; defaults to today, evaluated at call time
    :return: the persisted StockPools row
    """
    # the original signature used `target_date=current_date()`, which Python
    # evaluates once at import time and therefore goes stale in long-running
    # processes; a None sentinel defers the call to invocation time
    if target_date is None:
        target_date = current_date()
    with contract_api.DBSession(provider="zvt", data_schema=StockPools)() as session:
        if create_stock_pools_model.stock_pool_name not in get_stock_pool_names():
            build_stock_pool_info(
                CreateStockPoolInfoModel(
                    stock_pool_type=StockPoolType.custom, stock_pool_name=create_stock_pools_model.stock_pool_name
                ),
                timestamp=target_date,
            )
        # one instance per day
        stock_pool_id = f"admin_{to_time_str(target_date)}_{create_stock_pools_model.stock_pool_name}"
        datas: List[StockPools] = StockPools.query_data(
            session=session,
            filters=[
                StockPools.timestamp == to_pd_timestamp(target_date),
                StockPools.stock_pool_name == create_stock_pools_model.stock_pool_name,
            ],
            return_type="domain",
        )
        if datas:
            stock_pool = datas[0]
            if create_stock_pools_model.insert_mode == InsertMode.overwrite:
                stock_pool.entity_ids = create_stock_pools_model.entity_ids
            else:
                # append mode: union of existing and new ids (order not preserved)
                stock_pool.entity_ids = list(set(stock_pool.entity_ids + create_stock_pools_model.entity_ids))
        else:
            stock_pool = StockPools(
                entity_id="admin",
                timestamp=to_pd_timestamp(target_date),
                id=stock_pool_id,
                stock_pool_name=create_stock_pools_model.stock_pool_name,
                entity_ids=create_stock_pools_model.entity_ids,
            )
        session.add(stock_pool)
        session.commit()
        session.refresh(stock_pool)
        return stock_pool
def query_stock_tag_stats(query_stock_tag_stats_model: QueryStockTagStatsModel):
    """Return the latest TagStats rows for a stock pool.

    For ``TagStatsQueryType.simple`` only the stats rows are returned; otherwise
    each row is enriched with a ``stock_details`` list combining stock meta,
    stock tags and the day's system tags.

    :return: list of TagStats dicts (empty list if the pool has no stats yet)
    """
    with contract_api.DBSession(provider="zvt", data_schema=TagStats)() as session:
        # find the most recent stats date for this pool
        datas = TagStats.query_data(
            session=session,
            filters=[TagStats.stock_pool_name == query_stock_tag_stats_model.stock_pool_name],
            order=TagStats.timestamp.desc(),
            limit=1,
            return_type="domain",
        )
        if not datas:
            return []

        target_date = datas[0].timestamp

        # all stats rows of that date, in display order
        tag_stats_list: List[dict] = TagStats.query_data(
            session=session,
            filters=[
                TagStats.stock_pool_name == query_stock_tag_stats_model.stock_pool_name,
                TagStats.timestamp == target_date,
            ],
            return_type="dict",
            order=TagStats.position.asc(),
        )

        if query_stock_tag_stats_model.query_type == TagStatsQueryType.simple:
            return tag_stats_list

        entity_ids = flatten_list([tag_stats["entity_ids"] for tag_stats in tag_stats_list])

        # get stocks meta
        stocks = Stock.query_data(provider="em", entity_ids=entity_ids, return_type="domain")
        entity_map = {item.entity_id: item for item in stocks}

        # get stock tags
        tags_dict = StockTags.query_data(
            session=session,
            filters=[StockTags.entity_id.in_(entity_ids)],
            return_type="dict",
        )
        entity_tags_map = {item["entity_id"]: item for item in tags_dict}

        # get stock system tags
        system_tags_dict = StockSystemTags.query_data(
            session=session,
            filters=[StockSystemTags.timestamp == target_date, StockSystemTags.entity_id.in_(entity_ids)],
            return_type="dict",
        )
        entity_system_tags_map = {item["entity_id"]: item for item in system_tags_dict}

        for tag_stats in tag_stats_list:
            stock_details = []
            for entity_id in tag_stats["entity_ids"]:
                # NOTE(review): assumes meta and tags exist for every pooled
                # entity_id — .get(...) returning None would raise here; confirm
                stock_details_model = {
                    "entity_id": entity_id,
                    "main_tag": tag_stats["main_tag"],
                    "code": entity_map.get(entity_id).code,
                    "name": entity_map.get(entity_id).name,
                }

                stock_tags = entity_tags_map.get(entity_id)
                stock_details_model["sub_tag"] = stock_tags["sub_tag"]
                if stock_tags["active_hidden_tags"] is not None:
                    # NOTE(review): a dict keys() view, not a list — verify the
                    # consumer serializes it as intended
                    stock_details_model["hidden_tags"] = stock_tags["active_hidden_tags"].keys()
                else:
                    stock_details_model["hidden_tags"] = None

                stock_system_tags = entity_system_tags_map.get(entity_id)
                stock_details_model = fill_dict(stock_system_tags, stock_details_model)

                stock_details.append(stock_details_model)
            tag_stats["stock_details"] = stock_details

        return tag_stats_list
def refresh_main_tag_by_sub_tag(stock_tag: StockTags, set_by_user=False) -> StockTags:
    """Re-derive and persist a stock's main tag from its current sub tag.

    If the sub tag maps to the catch-all tag, the stock's existing main tag
    (and reason) is kept.  Stocks without any sub tags are returned untouched.
    """
    if not stock_tag.sub_tags:
        logger.warning(f"{stock_tag.entity_id} has no sub_tags yet")
        return stock_tag

    sub_tag = stock_tag.sub_tag
    sub_tag_reason = stock_tag.sub_tags[sub_tag]

    main_tag = get_main_tag_by_sub_tag(sub_tag)
    main_tag_reason = sub_tag_reason
    if main_tag == "其他":
        # no meaningful mapping: fall back to what the stock already has
        main_tag, main_tag_reason = stock_tag.main_tag, stock_tag.main_tag_reason

    set_stock_tags_model = SetStockTagsModel(
        entity_id=stock_tag.entity_id,
        main_tag=main_tag,
        main_tag_reason=main_tag_reason,
        sub_tag=sub_tag,
        sub_tag_reason=sub_tag_reason,
        active_hidden_tags=stock_tag.active_hidden_tags,
    )
    logger.info(f"set_stock_tags_model:{set_stock_tags_model}")

    return build_stock_tags(
        set_stock_tags_model=set_stock_tags_model,
        timestamp=stock_tag.timestamp,
        set_by_user=set_by_user,
        keep_current=False,
    )
def activate_industry_list(industry_list: List[str]):
    """Force-rebuild the default main tag for all stocks in the given industries."""
    industry_df = Block.query_data(
        provider="em", filters=[Block.category == "industry", Block.name.in_(industry_list)]
    )
    members: List[BlockStock] = BlockStock.query_data(
        provider="em",
        filters=[BlockStock.code.in_(industry_df["code"].tolist())],
        return_type="domain",
    )
    entity_ids = [member.stock_id for member in members]

    if not entity_ids:
        logger.info(f"No stocks in {industry_list}")
        return

    build_default_main_tag(entity_ids=entity_ids, force_rebuild=True)
def _create_main_tag_if_not_existed(main_tag, main_tag_reason):
    """Register ``main_tag`` as a main-tag info record unless it already exists."""
    info = CreateTagInfoModel(tag=main_tag, tag_reason=main_tag_reason)
    if is_tag_info_existed(tag_info=info, tag_type=TagType.main_tag):
        return
    build_tag_info(tag_info=info, tag_type=TagType.main_tag)
def change_main_tag(change_main_tag_model: ChangeMainTagModel):
    """Move every stock tagged ``current_main_tag`` over to ``new_main_tag``.

    Registers the new main tag if needed, then rewrites each affected stock
    through :func:`build_stock_tags`, keeping its current sub tag and active
    hidden tags.

    :return: the list of StockTags rows that were migrated
    """
    new_main_tag = change_main_tag_model.new_main_tag
    _create_main_tag_if_not_existed(main_tag=new_main_tag, main_tag_reason=new_main_tag)

    with contract_api.DBSession(provider="zvt", data_schema=StockTags)() as session:
        stock_tags: List[StockTags] = StockTags.query_data(
            filters=[StockTags.main_tag == change_main_tag_model.current_main_tag],
            session=session,
            return_type="domain",
        )

        for stock_tag in stock_tags:
            tag_parameter = build_tag_parameter(
                tag_type=TagType.main_tag,
                tag=new_main_tag,
                tag_reason=new_main_tag,
                stock_tag=stock_tag,
            )
            build_stock_tags(
                set_stock_tags_model=SetStockTagsModel(
                    entity_id=stock_tag.entity_id,
                    main_tag=tag_parameter.main_tag,
                    main_tag_reason=tag_parameter.main_tag_reason,
                    sub_tag=tag_parameter.sub_tag,
                    sub_tag_reason=tag_parameter.sub_tag_reason,
                    active_hidden_tags=stock_tag.active_hidden_tags,
                ),
                timestamp=now_pd_timestamp(),
                set_by_user=True,
                keep_current=False,
            )
            # build_stock_tags committed elsewhere; refresh our stale row
            session.refresh(stock_tag)
        return stock_tags
activate_industry_list(industry_list=["半导体"]) + # activate_sub_tags(ActivateSubTagsModel(sub_tags=["航天概念", "天基互联", "北斗导航", "通用航空"])) + + +# the __all__ is generated +__all__ = [ + "stock_tags_need_update", + "get_stock_tag_options", + "build_stock_tags", + "build_tag_parameter", + "batch_set_stock_tags", + "build_default_main_tag", + "build_default_sub_tags", + "get_tag_info_schema", + "is_tag_info_existed", + "build_tag_info", + "build_stock_pool_info", + "build_stock_pool", + "query_stock_tag_stats", + "refresh_main_tag_by_sub_tag", + "refresh_all_main_tag_by_sub_tag", + "reset_to_default_main_tag", + "activate_industry_list", + "activate_sub_tags", +] diff --git a/src/zvt/tag/tag_stats.py b/src/zvt/tag/tag_stats.py new file mode 100644 index 00000000..2ebd108d --- /dev/null +++ b/src/zvt/tag/tag_stats.py @@ -0,0 +1,199 @@ +# -*- coding: utf-8 -*- +import logging +from typing import List + +import pandas as pd +import sqlalchemy + +from zvt.api.kdata import get_kdata_schema +from zvt.contract import AdjustType, IntervalLevel +from zvt.contract.api import df_to_db, get_db_session +from zvt.domain.quotes import Stock1dHfqKdata, KdataCommon +from zvt.factors.top_stocks import TopStocks, get_top_stocks +from zvt.tag.common import InsertMode +from zvt.tag.tag_models import CreateStockPoolsModel +from zvt.tag.tag_schemas import TagStats, StockTags, StockPools +from zvt.tag.tag_service import build_stock_pool +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import to_pd_timestamp, date_time_by_interval, current_date + +logger = logging.getLogger(__name__) + + +def build_system_stock_pools(): + for stock_pool_name in ["main_line", "vol_up", "大局"]: + datas = StockPools.query_data( + limit=1, + filters=[StockPools.stock_pool_name == stock_pool_name], + order=StockPools.timestamp.desc(), + return_type="domain", + ) + start = None + if datas: + start = date_time_by_interval(datas[0].timestamp) + + df = TopStocks.query_data(start_timestamp=start, 
def build_stock_pool_tag_stats(
    stock_pool_name, force_rebuild_latest=False, target_date=None, adjust_type=AdjustType.hfq, provider="em"
):
    """Compute per-main-tag turnover stats for each new day of a stock pool.

    For every StockPools instance newer than the latest TagStats, groups the
    pool's stocks by main tag, ranks the groups by turnover, marks the top 5
    as "main line", carries forward a continuous-days counter from the
    previous day, and persists the result to TagStats.

    :param force_rebuild_latest: delete the latest day's stats first, then
        recompute (recursion runs exactly once with the flag cleared)
    :param target_date: explicit start date when no stats exist yet
    """
    datas = TagStats.query_data(
        limit=1,
        filters=[TagStats.stock_pool_name == stock_pool_name],
        order=TagStats.timestamp.desc(),
        return_type="domain",
    )
    start = target_date
    current_df = None
    if datas:
        if force_rebuild_latest:
            # drop the latest day's stats and recompute from scratch
            session = get_db_session("zvt", data_schema=TagStats)
            session.query(TagStats).filter(TagStats.stock_pool_name == stock_pool_name).filter(
                TagStats.timestamp == datas[0].timestamp
            ).delete()
            session.commit()
            return build_stock_pool_tag_stats(stock_pool_name=stock_pool_name, force_rebuild_latest=False)

        latest_tag_stats_timestamp = datas[0].timestamp
        # previous day's stats, used below to accumulate continuous-days
        current_df = TagStats.query_data(
            filters=[TagStats.stock_pool_name == stock_pool_name, TagStats.timestamp == latest_tag_stats_timestamp]
        )
        start = date_time_by_interval(latest_tag_stats_timestamp)

    stock_pools: List[StockPools] = StockPools.query_data(
        start_timestamp=start,
        filters=[StockPools.stock_pool_name == stock_pool_name],
        order=StockPools.timestamp.asc(),
        return_type="domain",
    )
    if not stock_pools:
        logger.info(f"no data for {stock_pool_name} {start}")
        return

    for stock_pool in stock_pools:
        target_date = stock_pool.timestamp
        logger.info(f"build_stock_pool_tag_stats for {stock_pool_name} {target_date}")

        entity_ids = stock_pool.entity_ids
        tags_df = StockTags.query_data(entity_ids=entity_ids, return_type="df", index="entity_id")
        kdata_schema: KdataCommon = get_kdata_schema(
            entity_type="stock", level=IntervalLevel.LEVEL_1DAY, adjust_type=adjust_type
        )
        kdata_df = kdata_schema.query_data(
            provider=provider,
            entity_ids=entity_ids,
            filters=[kdata_schema.timestamp == to_pd_timestamp(target_date)],
            columns=[kdata_schema.entity_id, kdata_schema.name, kdata_schema.turnover],
            index="entity_id",
        )

        # join tags with the day's turnover on entity_id
        df = pd.concat([tags_df, kdata_df[["turnover", "name"]]], axis=1)

        grouped_df = (
            df.groupby("main_tag")
            .agg(
                turnover=("turnover", "sum"),
                entity_count=("entity_id", "count"),
                entity_ids=("entity_id", lambda entity_id: list(entity_id)),
            )
            .reset_index()
        )
        sorted_df = grouped_df.sort_values(by=["turnover", "entity_count"], ascending=[False, False])
        sorted_df = sorted_df.reset_index(drop=True)
        sorted_df["position"] = sorted_df.index
        # top 5 tags by turnover count as the day's "main line"
        sorted_df["is_main_line"] = sorted_df.index < 5
        sorted_df["main_line_continuous_days"] = sorted_df["is_main_line"].apply(lambda x: 1 if x else 0)
        # logger.info(f"current_df\n: {current_df}")
        if pd_is_not_null(current_df):
            # carry the continuous-days counter over from the previous day for
            # tags that were main line both days
            sorted_df.set_index("main_tag", inplace=True, drop=False)
            current_df.set_index("main_tag", inplace=True, drop=False)
            common_index = sorted_df[sorted_df["is_main_line"]].index.intersection(
                current_df[current_df["is_main_line"]].index
            )
            pre_selected = current_df.loc[common_index]
            if pd_is_not_null(pre_selected):
                pre_selected = pre_selected.reindex(sorted_df.index, fill_value=0)
                sorted_df["main_line_continuous_days"] = (
                    sorted_df["main_line_continuous_days"] + pre_selected["main_line_continuous_days"]
                )
        sorted_df["entity_id"] = "admin"
        sorted_df["timestamp"] = target_date
        sorted_df["stock_pool_name"] = stock_pool_name
        sorted_df["id"] = sorted_df[["entity_id", "timestamp", "stock_pool_name", "main_tag"]].apply(
            lambda x: "_".join(x.astype(str)), axis=1
        )
        df_to_db(
            provider="zvt",
            df=sorted_df,
            data_schema=TagStats,
            force_update=True,
            dtype={"entity_ids": sqlalchemy.JSON},
        )
        # today's stats become "previous day" for the next iteration
        current_df = sorted_df
def build_stock_pool_and_tag_stats(
    stock_pool_name,
    entity_ids,
    insert_mode=InsertMode.append,
    target_date=None,
    provider="em",
    adjust_type=AdjustType.hfq,
):
    """Update a stock pool for one day, then rebuild that day's tag stats.

    :param target_date: pool/stats date; defaults to today, evaluated at call
        time (the original ``target_date=current_date()`` default was evaluated
        once at import time and went stale in long-running processes)
    """
    if target_date is None:
        target_date = current_date()

    create_stock_pools_model: CreateStockPoolsModel = CreateStockPoolsModel(
        stock_pool_name=stock_pool_name, entity_ids=entity_ids, insert_mode=insert_mode
    )

    build_stock_pool(create_stock_pools_model, target_date=target_date)

    build_stock_pool_tag_stats(
        stock_pool_name=stock_pool_name,
        force_rebuild_latest=True,
        target_date=target_date,
        adjust_type=adjust_type,
        provider=provider,
    )
def _get_default_main_tag_industry_mapping() -> Dict[str, List[str]]:
    """Invert the industry→main-tag mapping into main-tag→[industries]."""
    result: Dict[str, List[str]] = {}
    for industry, main_tag in _get_default_industry_main_tag_mapping().items():
        # setdefault returns the stored list, so one lookup replaces the
        # original setdefault + get pair
        result.setdefault(main_tag, []).append(industry)
    return result
def _get_initial_main_tag_info():
    """Seed records for MainTagInfo.

    Tags derived from industries come first; concept-only tags (those not
    already covered by an industry) are appended.  All records carry the
    fixed bootstrap timestamp and the ``admin`` entity id.
    """
    timestamp = "2024-03-25"
    entity_id = "admin"

    industry_mapping = _get_default_main_tag_industry_mapping()

    records = [
        {
            "id": f"{entity_id}_{main_tag}",
            "entity_id": entity_id,
            "timestamp": timestamp,
            "tag": main_tag,
            "tag_reason": f"来自这些行业:{industry}",
        }
        for main_tag, industry in industry_mapping.items()
    ]

    for tag, concepts in _get_default_main_tag_concept_mapping().items():
        if tag in industry_mapping:
            continue
        records.append(
            {
                "id": f"{entity_id}_{tag}",
                "entity_id": entity_id,
                "timestamp": timestamp,
                "tag": tag,
                "tag_reason": f"来自这些概念:{','.join(concepts)}",
            }
        )

    return records
StockPoolType.system.value, + "stock_pool_name": stock_pool_name, + } + for stock_pool_name in ["main_line", "vol_up", "大局", "all"] + ] + + +_hidden_tags = { + "中字头": "央企,国资委控股", + "核心资产": "高ROE 高现金流 高股息 低应收 低资本开支 低财务杠杆 有增长", + "高股息": "高股息", + "微盘股": "市值50亿以下", + "次新股": "上市未满两年", +} + + +def _get_initial_hidden_tag_info(): + timestamp = "2024-03-25" + entity_id = "admin" + return [ + { + "id": f"{entity_id}_{tag}", + "entity_id": entity_id, + "timestamp": timestamp, + "tag": tag, + "tag_reason": tag_reason, + } + for tag, tag_reason in _hidden_tags.items() + ] + + +def build_initial_main_tag_info(): + main_tag_info_list = _get_initial_main_tag_info() + df = pd.DataFrame.from_records(main_tag_info_list) + df_to_db(df=df, data_schema=MainTagInfo, provider="zvt", force_update=False) + + +def build_initial_industry_info(): + initial_industry_info = _get_initial_industry_info() + df = pd.DataFrame.from_records(initial_industry_info) + df_to_db(df=df, data_schema=IndustryInfo, provider="zvt", force_update=False) + + +def build_initial_sub_tag_info(force_update=False): + sub_tag_info_list = _get_initial_sub_tag_info() + df = pd.DataFrame.from_records(sub_tag_info_list) + df_to_db(df=df, data_schema=SubTagInfo, provider="zvt", force_update=force_update) + + +def build_initial_stock_pool_info(): + stock_pool_info_list = _get_initial_stock_pool_info() + df = pd.DataFrame.from_records(stock_pool_info_list) + df_to_db(df=df, data_schema=StockPoolInfo, provider="zvt", force_update=False) + + +def build_initial_hidden_tag_info(): + hidden_tag_info_list = _get_initial_hidden_tag_info() + df = pd.DataFrame.from_records(hidden_tag_info_list) + df_to_db(df=df, data_schema=HiddenTagInfo, provider="zvt", force_update=False) + + +def get_main_tags(): + df = MainTagInfo.query_data(columns=[MainTagInfo.tag]) + return df["tag"].tolist() + + +def get_main_tag_by_sub_tag(sub_tag): + datas: List[SubTagInfo] = SubTagInfo.query_data(filters=[SubTagInfo.tag == sub_tag], return_type="domain") + 
def get_main_tag_by_industry(industry_name):
    """Return the main tag configured for an industry name.

    Looks up IndustryInfo first and falls back to the default mapping,
    using "其他" (other) as the last resort.

    :param industry_name: industry name to resolve.
    :return: the main tag string.
    """
    datas: List[IndustryInfo] = IndustryInfo.query_data(
        filters=[IndustryInfo.industry_name == industry_name], return_type="domain"
    )
    if datas:
        return datas[0].main_tag
    # BUG FIX: the fallback value was computed but never returned, so this
    # function silently returned None for industries absent from the db.
    return _get_default_industry_main_tag_mapping().get(industry_name, "其他")
ensure_ascii=False) + # print(industry_to_main_tag("光伏设备")) + # result = {} + # for main_tag, concepts in get_main_tag_industry_mapping().items(): + # for tag in concepts: + # result[tag] = main_tag + # with open("industry_main_tag_mapping.json", "w") as json_file: + # json.dump(result, json_file, indent=2, ensure_ascii=False) + # build_initial_stock_pool_info() + # build_initial_main_tag_info() + build_initial_sub_tag_info(force_update=True) + build_initial_industry_info() + + +# the __all__ is generated +__all__ = [ + "build_initial_main_tag_info", + "build_initial_industry_info", + "build_initial_sub_tag_info", + "build_initial_stock_pool_info", + "build_initial_hidden_tag_info", + "get_main_tags", + "get_main_tag_by_sub_tag", + "get_main_tag_by_industry", + "get_sub_tags", + "get_hidden_tags", + "get_stock_pool_names", + "match_tag_by_type", + "match_tag", +] diff --git a/src/zvt/tag/tagger.py b/src/zvt/tag/tagger.py new file mode 100644 index 00000000..4b0590b3 --- /dev/null +++ b/src/zvt/tag/tagger.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +import logging +from typing import Type + +from zvt.contract import Mixin +from zvt.contract import TradableEntity +from zvt.contract.api import get_db_session +from zvt.contract.base_service import OneStateService +from zvt.contract.zvt_info import TaggerState +from zvt.domain import Stock +from zvt.tag.tag_schemas import StockTags + +logger = logging.getLogger(__name__) + + +class Tagger(OneStateService): + state_schema = TaggerState + + entity_schema: Type[TradableEntity] = None + + data_schema: Type[Mixin] = None + + start_timestamp = "2018-01-01" + + def __init__(self, force=False) -> None: + super().__init__() + assert self.entity_schema is not None + assert self.data_schema is not None + self.force = force + self.session = get_db_session(provider="zvt", data_schema=self.data_schema) + if self.state and not self.force: + logger.info(f"get start_timestamp from state") + self.start_timestamp = 
self.state["current_timestamp"] + logger.info(f"tag start_timestamp: {self.start_timestamp}") + + def tag(self): + raise NotImplementedError + + +class StockTagger(Tagger): + data_schema = StockTags + entity_schema = Stock + + def tag(self): + raise NotImplementedError + + +# the __all__ is generated +__all__ = ["Tagger", "StockTagger"] diff --git a/src/zvt/tasks/init_tag_system.py b/src/zvt/tasks/init_tag_system.py new file mode 100644 index 00000000..aa11fca1 --- /dev/null +++ b/src/zvt/tasks/init_tag_system.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +from zvt.domain import Block, BlockStock, Stock +from zvt.tag.tag_service import build_default_main_tag, build_default_sub_tags +from zvt.tag.tag_utils import ( + build_initial_stock_pool_info, + build_initial_main_tag_info, + build_initial_sub_tag_info, + build_initial_industry_info, +) +from zvt.trading.trading_service import build_default_query_stock_quote_setting + +if __name__ == "__main__": + # init industry info + build_initial_industry_info() + + # init tag info + build_initial_main_tag_info() + build_initial_sub_tag_info() + build_initial_stock_pool_info() + build_default_query_stock_quote_setting() + + Stock.record_data(provider="em") + Block.record_data(provider="em", sleeping_time=0) + BlockStock.record_data(provider="em", sleeping_time=0) + # init default main tag + build_default_main_tag() + + # init default sub tags + build_default_sub_tags() diff --git a/src/zvt/tasks/qmt_data_runner.py b/src/zvt/tasks/qmt_data_runner.py new file mode 100644 index 00000000..221256b9 --- /dev/null +++ b/src/zvt/tasks/qmt_data_runner.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +import logging +import time + +import pandas as pd +from xtquant import xtdata + +from zvt import init_log +from zvt.broker.qmt.qmt_quote import get_qmt_stocks +from zvt.contract import AdjustType +from zvt.recorders.qmt.meta import QMTStockRecorder +from zvt.recorders.qmt.quotes import QMTStockKdataRecorder + +logger = 
logging.getLogger(__name__) + + +def download_data(download_tick=False): + period = "1d" + xtdata.download_sector_data() + stock_codes = get_qmt_stocks() + stock_codes = sorted(stock_codes) + count = len(stock_codes) + download_status = {"ok": False} + + def update_progress(data, download_status: dict = download_status): + logger.info(data) + finished = data["finished"] + total = data["total"] + download_status["finished"] = finished + download_status["total"] = total + if finished == total: + download_status["ok"] = True + + start_time = time.time() + + xtdata.download_history_data2(stock_list=stock_codes, period=period, callback=update_progress) + + while True: + logger.info(f"current download_status:{download_status}") + if download_status["ok"]: + logger.info(f"finish download 1d kdata") + break + cost_time = time.time() - start_time + if cost_time >= 60 * 30: + logger.info(f"timeout download 1d kdata") + break + time.sleep(10) + + QMTStockRecorder().run() + QMTStockKdataRecorder(adjust_type=AdjustType.qfq, sleeping_time=0).run() + + xtdata.download_financial_data2( + stock_list=stock_codes, table_list=["Capital"], start_time="", end_time="", callback=lambda x: print(x) + ) + logger.info("download capital data ok") + + if download_tick: + for index, stock_code in enumerate(stock_codes): + logger.info(f"run to {index + 1}/{count}") + + records = xtdata.get_market_data( + stock_list=[stock_code], + period=period, + count=5, + dividend_type="front", + fill_data=False, + ) + dfs = [] + for col in records: + df = records[col].T + df.columns = [col] + dfs.append(df) + kdatas = pd.concat(dfs, axis=1) + start_time = kdatas.index.to_list()[0] + xtdata.download_history_data(stock_code, period="tick", start_time=start_time) + logger.info(f"download {stock_code} tick from {start_time} ok") + + +if __name__ == "__main__": + init_log("qmt_data_runner.log") + from apscheduler.schedulers.background import BackgroundScheduler + + sched = BackgroundScheduler() + download_data() 
def report_limit_up():
    """Email a report of the most recent trading day's limit-up stocks.

    Reads the latest LimitUpInfo timestamp, collects that day's records and
    sends them via the module-level EmailInformer.
    """
    latest_data = LimitUpInfo.query_data(order=LimitUpInfo.timestamp.desc(), limit=1, return_type="domain")
    if not latest_data:
        # nothing recorded yet; avoid IndexError on an empty table
        logger.info("no LimitUpInfo data yet, skip limit-up report")
        return
    timestamp = latest_data[0].timestamp
    df = LimitUpInfo.query_data(start_timestamp=timestamp, end_timestamp=timestamp, columns=["code", "name", "reason"])
    # reasons are "+"-joined; split into a list per stock
    df["reason"] = df["reason"].str.split("+")
    # log instead of the leftover debug print
    logger.info(df)
    email_informer.send_message(zvt_config["email_username"], f"{timestamp} 热门报告", f"{df}")
True], index="entity_id") + run_data_recorder( + domain=Stockhk1dHfqKdata, + entity_ids=df.index.tolist(), + data_provider=data_provider, + entity_provider=entity_provider, + day_data=True, + sleeping_time=sleeping_time, + ) + + +def record_data_and_build_stock_pools(): + # 获取 涨停 指数 板块(概念) 个股行情数据 + record_stock_data() + + # 计算短期/中期最强 放量突破年线半年线个股 + compute_top_stocks() + # 放入股票池 + build_system_stock_pools() + for stock_pool_name in ["main_line", "vol_up", "大局"]: + build_stock_pool_tag_stats(stock_pool_name=stock_pool_name, force_rebuild_latest=True) + + +if __name__ == "__main__": + init_log("sotck_pool_runner.log") + record_data_and_build_stock_pools() + sched.add_job(func=record_data_and_build_stock_pools, trigger="cron", hour=16, minute=00, day_of_week="mon-fri") + sched.start() + sched._thread.join() diff --git a/src/zvt/tasks/today_shoot_runner.py b/src/zvt/tasks/today_shoot_runner.py new file mode 100644 index 00000000..d7918eba --- /dev/null +++ b/src/zvt/tasks/today_shoot_runner.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +import logging +import time + +import eastmoneypy +from apscheduler.schedulers.background import BackgroundScheduler + +from zvt import init_log +from zvt.api.selector import get_shoot_today +from zvt.domain import Stock +from zvt.informer.inform_utils import add_to_eastmoney +from zvt.tag.common import InsertMode +from zvt.tag.tag_stats import build_stock_pool_and_tag_stats +from zvt.utils.time_utils import now_pd_timestamp, current_date + +logger = logging.getLogger(__name__) + + +sched = BackgroundScheduler() + + +def calculate_top(): + try: + eastmoneypy.del_group("今日异动") + except: + pass + while True: + current_timestamp = now_pd_timestamp() + + if not Stock.in_trading_time(): + logger.info(f"calculate shoots finished at: {current_timestamp}") + break + + if Stock.in_trading_time() and not Stock.in_real_trading_time(): + logger.info(f"Sleeping time......") + time.sleep(60 * 1) + continue + + target_date = current_date() + shoot_up, 
shoot_down = get_shoot_today() + + shoots = shoot_up + shoot_down + if shoots: + build_stock_pool_and_tag_stats( + entity_ids=shoots, + stock_pool_name="今日异动", + insert_mode=InsertMode.append, + target_date=target_date, + provider="qmt", + ) + add_to_eastmoney(codes=[entity_id.split("_")[2] for entity_id in shoots], group="今日异动", over_write=False) + + logger.info(f"Sleep 1 minutes to compute {target_date} shoots tag stats") + time.sleep(60 * 1) + + +if __name__ == "__main__": + init_log("today_shoot_runner.log") + calculate_top() + sched.add_job(func=calculate_top, trigger="cron", hour=9, minute=30, day_of_week="mon-fri") + sched.start() + sched._thread.join() diff --git a/src/zvt/tasks/today_top_runner.py b/src/zvt/tasks/today_top_runner.py new file mode 100644 index 00000000..90d4249d --- /dev/null +++ b/src/zvt/tasks/today_top_runner.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +import logging +import time +from typing import List + +import eastmoneypy +from apscheduler.schedulers.background import BackgroundScheduler + +from zvt import init_log +from zvt.api.selector import get_top_up_today, get_top_down_today, get_top_vol +from zvt.domain import Stock +from zvt.informer.inform_utils import add_to_eastmoney +from zvt.recorders.em.em_api import record_hot_topic +from zvt.tag.common import InsertMode +from zvt.tag.tag_schemas import StockPools +from zvt.tag.tag_stats import build_stock_pool_and_tag_stats +from zvt.utils.time_utils import now_pd_timestamp, current_date + +logger = logging.getLogger(__name__) + + +sched = BackgroundScheduler() + + +def calculate_top(clear_em=True): + if clear_em: + try: + eastmoneypy.del_group("今日强势") + except: + pass + + seed = 0 + add_all_to_em = False + while True: + current_timestamp = now_pd_timestamp() + + if not Stock.in_trading_time(): + logger.info(f"calculate top finished at: {current_timestamp}") + break + + if Stock.in_trading_time() and not Stock.in_real_trading_time(): + logger.info(f"Sleeping time......") + 
time.sleep(60 * 1) + continue + + if seed == 0: + record_hot_topic() + seed = seed + 1 + if seed == 5: + seed = 0 + target_date = current_date() + top_up_entity_ids = get_top_up_today() + if top_up_entity_ids: + build_stock_pool_and_tag_stats( + entity_ids=top_up_entity_ids, + stock_pool_name="今日强势", + insert_mode=InsertMode.append, + target_date=target_date, + provider="qmt", + ) + try: + to_added = top_up_entity_ids + if add_all_to_em: + stock_pools: List[StockPools] = StockPools.query_data( + filters=[StockPools.stock_pool_name == "今日强势"], + order=StockPools.timestamp.desc(), + limit=1, + return_type="domain", + ) + if stock_pools: + to_added = stock_pools[0].entity_ids + if len(to_added) > 500: + to_added = get_top_vol(entity_ids=to_added, limit=500) + + add_to_eastmoney( + codes=[entity_id.split("_")[2] for entity_id in to_added], group="今日强势", over_write=False + ) + add_all_to_em = False + except Exception as e: + logger.error(e) + add_all_to_em = True + + top_down_entity_ids = get_top_down_today() + if top_down_entity_ids: + build_stock_pool_and_tag_stats( + entity_ids=top_down_entity_ids, + stock_pool_name="今日弱势", + insert_mode=InsertMode.append, + target_date=target_date, + provider="qmt", + ) + + logger.info(f"Sleep 2 minutes to compute {target_date} top stock tag stats") + time.sleep(60 * 2) + + +if __name__ == "__main__": + init_log("today_top_runner.log") + calculate_top(clear_em=False) + sched.add_job(func=calculate_top, trigger="cron", hour=9, minute=26, day_of_week="mon-fri") + sched.start() + sched._thread.join() diff --git a/src/zvt/trader/__init__.py b/src/zvt/trader/__init__.py new file mode 100644 index 00000000..3a9e30e5 --- /dev/null +++ b/src/zvt/trader/__init__.py @@ -0,0 +1,163 @@ +# -*- coding: utf-8 -*- +from enum import Enum +from typing import Union, List + +import pandas as pd + +from zvt.contract import IntervalLevel +from zvt.utils.decorator import to_string + + +class TradingSignalType(Enum): + open_long = "open_long" + open_short 
= "open_short" + keep_long = "keep_long" + keep_short = "keep_short" + close_long = "close_long" + close_short = "close_short" + + +class OrderType(Enum): + order_long = "order_long" + order_short = "order_short" + order_close_long = "order_close_long" + order_close_short = "order_close_short" + + +def trading_signal_type_to_order_type(trading_signal_type): + if trading_signal_type == TradingSignalType.open_long: + return OrderType.order_long + elif trading_signal_type == TradingSignalType.open_short: + return OrderType.order_short + elif trading_signal_type == TradingSignalType.close_long: + return OrderType.order_close_long + elif trading_signal_type == TradingSignalType.close_short: + return OrderType.order_close_short + + +@to_string +class TradingSignal: + def __init__( + self, + entity_id: str, + due_timestamp: Union[str, pd.Timestamp], + happen_timestamp: Union[str, pd.Timestamp], + trading_level: IntervalLevel, + trading_signal_type: TradingSignalType, + position_pct: float = None, + order_money: float = None, + order_amount: int = None, + ): + """ + + :param entity_id: the entity id + :param due_timestamp: the signal due time + :param happen_timestamp: the time when generating the signal + :param trading_level: the level + :param trading_signal_type: + :param position_pct: percentage of account to order + :param order_money: money to order + :param order_amount: amount to order + """ + self.entity_id = entity_id + self.due_timestamp = due_timestamp + self.happen_timestamp = happen_timestamp + self.trading_level = trading_level + self.trading_signal_type = trading_signal_type + + if len([x for x in (position_pct, order_money, order_amount) if x is not None]) != 1: + assert False + # use position_pct or order_money or order_amount + self.position_pct = position_pct + # when close the position,just use position_pct + self.order_money = order_money + self.order_amount = order_amount + + +class TradingListener(object): + def on_trading_open(self, timestamp): + 
class AccountService(TradingListener):
    """Abstract account interface: position/account queries plus order placement.

    Concrete implementations (e.g. a simulated or a live broker account) override
    these no-op stubs; the TradingListener callbacks keep the account in sync
    with the trading loop.
    """

    def get_positions(self):
        # Return all positions currently held by the account.
        pass

    def get_current_position(self, entity_id, create_if_not_exist=False):
        """
        overwrite it to provide your real position

        :param entity_id:
        """
        pass

    def get_current_account(self):
        # Return the current account stats snapshot.
        pass

    def order_by_position_pct(
        self,
        entity_id,
        order_price,
        order_timestamp,
        order_type,
        order_position_pct: float,
    ):
        # Place an order sized as a percentage of the account.
        pass

    def order_by_money(
        self,
        entity_id,
        order_price,
        order_timestamp,
        order_type,
        order_money,
    ):
        # Place an order sized by an absolute amount of money.
        pass

    def order_by_amount(
        self,
        entity_id,
        order_price,
        order_timestamp,
        order_type,
        order_amount,
    ):
        # Place an order sized by number of shares/contracts.
        pass
TraderError(Exception): """Base class for exceptions in this module.""" + pass @@ -27,5 +28,14 @@ def __init__(self, message="invalid order"): class WrongKdataError(TraderError): def __init__(self, message="wrong kdata"): self.message = message + + # the __all__ is generated -__all__ = ['TraderError', 'InvalidOrderParamError', 'NotEnoughMoneyError', 'NotEnoughPositionError', 'InvalidOrderError', 'WrongKdataError'] \ No newline at end of file +__all__ = [ + "TraderError", + "InvalidOrderParamError", + "NotEnoughMoneyError", + "NotEnoughPositionError", + "InvalidOrderError", + "WrongKdataError", +] diff --git a/src/zvt/trader/sim_account.py b/src/zvt/trader/sim_account.py new file mode 100644 index 00000000..cf54d06c --- /dev/null +++ b/src/zvt/trader/sim_account.py @@ -0,0 +1,551 @@ +# -*- coding: utf-8 -*- +import logging +import math +from typing import List, Optional + +from zvt.api.kdata import get_kdata, get_kdata_schema +from zvt.contract import IntervalLevel, TradableEntity, AdjustType +from zvt.contract.api import get_db_session, decode_entity_id +from zvt.trader import TradingSignal, AccountService, OrderType, trading_signal_type_to_order_type +from zvt.trader.errors import ( + NotEnoughMoneyError, + InvalidOrderError, + NotEnoughPositionError, + InvalidOrderParamError, + WrongKdataError, +) +from zvt.trader.trader_info_api import get_trader_info, clear_trader +from zvt.trader.trader_models import AccountStatsModel, PositionModel +from zvt.trader.trader_schemas import AccountStats, Position, Order, TraderInfo +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import to_pd_timestamp, to_time_str, TIME_FORMAT_ISO8601, is_same_date +from zvt.utils.utils import fill_domain_from_dict + + +class SimAccountService(AccountService): + def __init__( + self, + entity_schema: TradableEntity, + trader_name, + timestamp, + provider=None, + level=IntervalLevel.LEVEL_1DAY, + base_capital=1000000, + buy_cost=0.001, + sell_cost=0.001, + slippage=0.001, 
+ rich_mode=True, + adjust_type: AdjustType = None, + keep_history=False, + real_time=False, + kdata_use_begin_time=False, + ): + self.logger = logging.getLogger(self.__class__.__name__) + + self.entity_schema = entity_schema + self.base_capital = base_capital + self.buy_cost = buy_cost + self.sell_cost = sell_cost + self.slippage = slippage + self.rich_mode = rich_mode + self.adjust_type = adjust_type + self.trader_name = trader_name + + self.session = get_db_session("zvt", data_schema=TraderInfo) + self.provider = provider + self.level = level + self.start_timestamp = timestamp + self.keep_history = keep_history + self.real_time = real_time + self.kdata_use_begin_time = kdata_use_begin_time + + self.account = self.init_account() + + account_info = ( + f"init_account,holding size:{len(self.account.positions)} profit:{self.account.profit} input_money:{self.account.input_money} " + f"cash:{self.account.cash} value:{self.account.value} all_value:{self.account.all_value}" + ) + self.logger.info(account_info) + + def input_money(self, money=1000000): + self.account.input_money += money + self.account.cash += money + + def clear_account(self): + trader_info = get_trader_info(session=self.session, trader_name=self.trader_name, return_type="domain", limit=1) + + if trader_info: + self.logger.warning("trader:{} has run before,old result would be deleted".format(self.trader_name)) + clear_trader(session=self.session, trader_name=self.trader_name) + + def init_account(self) -> AccountStats: + # 清除历史数据 + if not self.keep_history: + self.clear_account() + + # 读取之前保存的账户 + if self.keep_history: + self.account = self.load_account() + if self.account: + return self.account + + # init trader info + entity_type = self.entity_schema.__name__.lower() + sim_account = TraderInfo( + id=self.trader_name, + entity_id=f"trader_zvt_{self.trader_name}", + timestamp=self.start_timestamp, + trader_name=self.trader_name, + entity_type=entity_type, + start_timestamp=self.start_timestamp, + 
provider=self.provider, + level=self.level.value, + real_time=self.real_time, + kdata_use_begin_time=self.kdata_use_begin_time, + kdata_adjust_type=self.adjust_type.value, + ) + self.session.add(sim_account) + self.session.commit() + + return AccountStats( + entity_id=f"trader_zvt_{self.trader_name}", + timestamp=self.start_timestamp, + trader_name=self.trader_name, + cash=self.base_capital, + input_money=self.base_capital, + all_value=self.base_capital, + value=0, + closing=False, + ) + + def load_account(self) -> AccountStats: + records = AccountStats.query_data( + filters=[AccountStats.trader_name == self.trader_name], + order=AccountStats.timestamp.desc(), + limit=1, + return_type="domain", + ) + if not records: + return self.account + latest_record: AccountStats = records[0] + + # create new orm object from latest record + account_stats_model = AccountStatsModel.from_orm(latest_record) + account = AccountStats() + fill_domain_from_dict(account, account_stats_model.model_dump(exclude={"id", "positions"})) + + positions: List[Position] = [] + for position_domain in latest_record.positions: + position_model = PositionModel.from_orm(position_domain) + self.logger.debug("current position:{}".format(position_model)) + position = Position() + fill_domain_from_dict(position, position_model.model_dump()) + positions.append(position) + + account.positions = positions + + return account + + def on_trading_open(self, timestamp): + self.logger.info("on_trading_open:{}".format(timestamp)) + if is_same_date(timestamp, self.start_timestamp): + return + self.account = self.load_account() + + def on_trading_error(self, timestamp, error): + pass + + def on_trading_finish(self, timestamp): + pass + + def on_trading_signals(self, trading_signals: List[TradingSignal]): + for trading_signal in trading_signals: + try: + self.handle_trading_signal(trading_signal) + except Exception as e: + self.logger.exception(e) + self.on_trading_error(timestamp=trading_signal.happen_timestamp, 
error=e) + + def handle_trading_signal(self, trading_signal: TradingSignal): + entity_id = trading_signal.entity_id + happen_timestamp = trading_signal.happen_timestamp + order_type = trading_signal_type_to_order_type(trading_signal.trading_signal_type) + trading_level = trading_signal.trading_level.value + if order_type: + try: + kdata = get_kdata( + provider=self.provider, + entity_id=entity_id, + level=trading_level, + start_timestamp=happen_timestamp, + end_timestamp=happen_timestamp, + limit=1, + adjust_type=self.adjust_type, + ) + except Exception as e: + self.logger.error(e) + raise WrongKdataError("could not get kdata") + + if pd_is_not_null(kdata): + entity_type, _, _ = decode_entity_id(kdata["entity_id"][0]) + + the_price = kdata["close"][0] + + if the_price: + if trading_signal.position_pct: + self.order_by_position_pct( + entity_id=entity_id, + order_price=the_price, + order_timestamp=happen_timestamp, + order_position_pct=trading_signal.position_pct, + order_type=order_type, + ) + elif trading_signal.order_money: + self.order_by_money( + entity_id=entity_id, + order_price=the_price, + order_timestamp=happen_timestamp, + order_money=trading_signal.order_money, + order_type=order_type, + ) + elif trading_signal.order_amount: + self.order_by_amount( + entity_id=entity_id, + order_price=the_price, + order_timestamp=happen_timestamp, + order_amount=trading_signal.order_amount, + order_type=order_type, + ) + else: + assert False + else: + self.logger.warning( + "ignore trading signal,wrong kdata,entity_id:{},timestamp:{},kdata:{}".format( + entity_id, happen_timestamp, kdata.to_dict(orient="records") + ) + ) + + else: + self.logger.warning( + "ignore trading signal,could not get kdata,entity_id:{},timestamp:{}".format( + entity_id, happen_timestamp + ) + ) + + def on_trading_close(self, timestamp): + self.logger.info("on_trading_close:{}".format(timestamp)) + # remove the empty position + self.account.positions = [ + position for position in 
def on_trading_close(self, timestamp):
    """
    Daily close handling: drop empty positions, re-value every remaining
    position against the day's closing price, refresh profit figures, and
    persist the account snapshot.

    :param timestamp: the trading-close time
    """
    self.logger.info("on_trading_close:{}".format(timestamp))
    # remove the empty positions
    self.account.positions = [
        position for position in self.account.positions if position.long_amount > 0 or position.short_amount > 0
    ]

    # id of the snapshot being (re)computed for this day
    the_id = "{}_{}".format(self.trader_name, to_time_str(timestamp, TIME_FORMAT_ISO8601))

    self.account.value = 0
    self.account.all_value = 0
    for position in self.account.positions:
        entity_type, _, _ = decode_entity_id(position.entity_id)
        data_schema = get_kdata_schema(entity_type, level=IntervalLevel.LEVEL_1DAY, adjust_type=self.adjust_type)

        # latest daily bar at or before `timestamp`
        kdata = get_kdata(
            provider=self.provider,
            level=IntervalLevel.LEVEL_1DAY,
            entity_id=position.entity_id,
            order=data_schema.timestamp.desc(),
            end_timestamp=timestamp,
            limit=1,
            adjust_type=self.adjust_type,
        )

        # NOTE(review): assumes the kdata query returned at least one row — confirm
        closing_price = kdata["close"][0]

        # T+1 restriction resets at close: everything becomes available
        position.available_long = position.long_amount
        position.available_short = position.short_amount

        if closing_price:
            if (position.long_amount is not None) and position.long_amount > 0:
                position.value = position.long_amount * closing_price
                self.account.value += position.value
            elif (position.short_amount is not None) and position.short_amount > 0:
                # short value = 2x margin minus current cost to buy back
                position.value = 2 * (position.short_amount * position.average_short_price)
                position.value -= position.short_amount * closing_price
                self.account.value += position.value

            # refresh long-side profit
            # FIX: guard the denominator — a short-only position has
            # long_amount == 0 and previously raised ZeroDivisionError here
            if position.long_amount and position.average_long_price:
                position.profit = (closing_price - position.average_long_price) * position.long_amount
                position.profit_rate = position.profit / (position.average_long_price * position.long_amount)

        else:
            self.logger.warning(
                "could not refresh close value for position:{},timestamp:{}".format(position.entity_id, timestamp)
            )

        position.id = "{}_{}_{}".format(
            self.trader_name, position.entity_id, to_time_str(timestamp, TIME_FORMAT_ISO8601)
        )
        position.timestamp = to_pd_timestamp(timestamp)
        position.account_stats_id = the_id

    self.account.id = the_id
    self.account.all_value = self.account.value + self.account.cash
    self.account.closing = True
    self.account.timestamp = to_pd_timestamp(timestamp)
    self.account.profit = self.account.all_value - self.account.input_money
    self.account.profit_rate = self.account.profit / self.account.input_money

    self.session.add(self.account)
    self.session.commit()
    account_info = (
        f"on_trading_close,holding size:{len(self.account.positions)} profit:{self.account.profit} input_money:{self.account.input_money} "
        f"cash:{self.account.cash} value:{self.account.value} all_value:{self.account.all_value}"
    )
    self.logger.info(account_info)


def get_current_position(self, entity_id, create_if_not_exist=False) -> Optional["Position"]:
    """
    get position for entity_id

    :param entity_id: the entity id
    :param create_if_not_exist: create an empty position if not exist in current account
    :return: the Position, or None when absent and create_if_not_exist is False
    """
    for position in self.account.positions:
        if position.entity_id == entity_id:
            return position
    if create_if_not_exist:
        trading_t = self.entity_schema.get_trading_t()
        current_position = Position(
            trader_name=self.trader_name,
            entity_id=entity_id,
            long_amount=0,
            available_long=0,
            average_long_price=0,
            short_amount=0,
            available_short=0,
            average_short_price=0,
            profit=0,
            value=0,
            trading_t=trading_t,
        )
        # add it to account
        self.account.positions.append(current_position)
        return current_position
    return None


def get_current_account(self):
    """Return the in-memory AccountStats being maintained by this service."""
    return self.account
def update_position(self, current_position, order_amount, current_price, order_type, timestamp):
    """
    Apply one order to a position (cash, amounts, average prices) and persist
    the Order record.

    :param current_position: the Position to mutate
    :param order_amount: number of units in this order
    :param current_price: deal price
    :param order_type: OrderType member
    :param timestamp: order time
    :raises NotEnoughMoneyError: opening cost exceeds cash and rich_mode is off
    """
    if order_type == OrderType.order_long:
        # total cost includes slippage and buy commission
        need_money = (order_amount * current_price) * (1 + self.slippage + self.buy_cost)
        if self.account.cash < need_money:
            if self.rich_mode:
                self.input_money()
            else:
                raise NotEnoughMoneyError()

        self.account.cash -= need_money

        # recompute the average long price
        long_amount = current_position.long_amount + order_amount
        if long_amount == 0:
            # FIX: previously this set avg to 0 and then fell through to the
            # division below, raising ZeroDivisionError
            current_position.average_long_price = 0
        else:
            current_position.average_long_price = (
                current_position.average_long_price * current_position.long_amount + current_price * order_amount
            ) / long_amount

        current_position.long_amount = long_amount

        # T+0 instruments may be closed the same day
        if current_position.trading_t == 0:
            current_position.available_long += order_amount

    elif order_type == OrderType.order_short:
        need_money = (order_amount * current_price) * (1 + self.slippage + self.buy_cost)
        if self.account.cash < need_money:
            if self.rich_mode:
                self.input_money()
            else:
                raise NotEnoughMoneyError()

        self.account.cash -= need_money

        short_amount = current_position.short_amount + order_amount
        if short_amount == 0:
            # FIX: symmetric zero guard (the long branch had a broken one,
            # this branch had none at all)
            current_position.average_short_price = 0
        else:
            current_position.average_short_price = (
                current_position.average_short_price * current_position.short_amount + current_price * order_amount
            ) / short_amount

        current_position.short_amount = short_amount

        if current_position.trading_t == 0:
            current_position.available_short += order_amount

    elif order_type == OrderType.order_close_long:
        self.account.cash += order_amount * current_price * (1 - self.slippage - self.sell_cost)
        # FIXME: if the position is only partially closed, the average price should be recomputed

        current_position.available_long -= order_amount
        current_position.long_amount -= order_amount

    elif order_type == OrderType.order_close_short:
        # return the 2x margin, then pay to buy back
        self.account.cash += 2 * (order_amount * current_position.average_short_price)
        self.account.cash -= order_amount * current_price * (1 + self.slippage + self.sell_cost)

        current_position.available_short -= order_amount
        current_position.short_amount -= order_amount
    else:
        assert False

    # save the order info to db
    order_id = "{}_{}_{}_{}".format(
        self.trader_name, order_type, current_position.entity_id, to_time_str(timestamp, TIME_FORMAT_ISO8601)
    )
    order = Order(
        id=order_id,
        timestamp=to_pd_timestamp(timestamp),
        trader_name=self.trader_name,
        entity_id=current_position.entity_id,
        order_price=current_price,
        order_amount=order_amount,
        order_type=order_type.value,
        level=self.level.value,
        status="success",
    )
    self.session.add(order)
    self.session.commit()


def cal_amount_by_money(
    self,
    order_price: float,
    order_money: float,
):
    """
    Convert a money budget into a whole-unit order amount at order_price,
    accounting for slippage and buy commission.

    :raises NotEnoughMoneyError: order_money exceeds cash and rich_mode is off
    """
    if order_money > self.account.cash:
        if self.rich_mode:
            self.input_money()
        else:
            raise NotEnoughMoneyError()

    cost = order_price * (1 + self.slippage + self.buy_cost)
    # floor to whole units
    order_amount = order_money // cost

    return order_amount


def cal_amount_by_position_pct(self, entity_id, order_price: float, order_position_pct: float, order_type):
    """
    Size an order as a percentage of cash (for opening) or of the available
    position (for closing).

    :raises NotEnoughMoneyError: opening and even one unit is unaffordable
    :raises NotEnoughPositionError: closing with no available position
    """
    if order_type == OrderType.order_long or order_type == OrderType.order_short:
        cost = order_price * (1 + self.slippage + self.buy_cost)
        want_pay = self.account.cash * order_position_pct
        order_amount = want_pay // cost

        if order_amount < 1:
            if self.rich_mode:
                self.input_money()
                order_amount = max((self.account.cash * order_position_pct) // cost, 1)
            else:
                raise NotEnoughMoneyError()
        return order_amount
    elif order_type == OrderType.order_close_long or order_type == OrderType.order_close_short:
        current_position = self.get_current_position(entity_id=entity_id, create_if_not_exist=True)
        if order_type == OrderType.order_close_long:
            available = current_position.available_long
        else:
            available = current_position.available_short
        if available > 0:
            if order_position_pct == 1.0:
                order_amount = available
            else:
                order_amount = math.floor(available * order_position_pct)
            return order_amount
        else:
            raise NotEnoughPositionError()


def order_by_position_pct(
    self,
    entity_id,
    order_timestamp,
    order_price: float,
    order_type: "OrderType",
    order_position_pct: float = 0.2,
):
    """Place an order sized as a percentage of cash / available position."""
    order_amount = self.cal_amount_by_position_pct(
        entity_id=entity_id, order_price=order_price, order_position_pct=order_position_pct, order_type=order_type
    )

    self.order_by_amount(
        entity_id=entity_id,
        order_price=order_price,
        order_amount=order_amount,
        order_timestamp=order_timestamp,
        order_type=order_type,
    )


def order_by_money(
    self,
    entity_id,
    order_timestamp,
    order_price: float,
    order_type: "OrderType",
    order_money: float,
):
    """
    Place an opening order sized by a money budget.

    :raises InvalidOrderParamError: closing orders cannot be sized by money
    """
    if order_type not in (OrderType.order_long, OrderType.order_short):
        raise InvalidOrderParamError(f"order type: {order_type.value} not support order_by_money")

    order_amount = self.cal_amount_by_money(order_price=order_price, order_money=order_money)
    self.order_by_amount(
        entity_id=entity_id,
        order_price=order_price,
        order_amount=order_amount,
        order_timestamp=order_timestamp,
        order_type=order_type,
    )


def order_by_amount(
    self,
    entity_id,
    order_price,
    order_timestamp,
    order_type,
    order_amount,
):
    """
    Place an order of an explicit amount, validating position state first.

    :raises InvalidOrderError: opening against an opposite open position
    :raises NotEnoughPositionError: closing more than is available
    """
    current_position = self.get_current_position(entity_id=entity_id, create_if_not_exist=True)

    # open long
    if order_type == OrderType.order_long:
        if current_position.short_amount > 0:
            raise InvalidOrderError("close the short position before open long")

        self.update_position(current_position, order_amount, order_price, order_type, order_timestamp)
    # open short
    elif order_type == OrderType.order_short:
        if current_position.long_amount > 0:
            raise InvalidOrderError("close the long position before open short")

        self.update_position(current_position, order_amount, order_price, order_type, order_timestamp)
    # close long
    elif order_type == OrderType.order_close_long:
        if current_position.available_long >= order_amount:
            self.update_position(current_position, order_amount, order_price, order_type, order_timestamp)
        else:
            raise NotEnoughPositionError()
    # close short
    elif order_type == OrderType.order_close_short:
        if current_position.available_short >= order_amount:
            self.update_position(current_position, order_amount, order_price, order_type, order_timestamp)
        else:
            # FIX: was a bare Exception("not enough position"); use the same
            # error type as the close-long branch for consistent handling
            raise NotEnoughPositionError()
+__all__ = ["AccountService", "SimAccountService"] diff --git a/src/zvt/trader/trader.py b/src/zvt/trader/trader.py new file mode 100644 index 00000000..f75a3434 --- /dev/null +++ b/src/zvt/trader/trader.py @@ -0,0 +1,570 @@ +# -*- coding: utf-8 -*- +import logging +import time +from typing import List, Union, Type, Tuple + +import pandas as pd + +from zvt.contract import IntervalLevel, TradableEntity, AdjustType +from zvt.contract.drawer import Drawer +from zvt.contract.factor import Factor, TargetType +from zvt.contract.normal_data import NormalData +from zvt.domain import Stock +from zvt.trader import TradingSignal, TradingSignalType, TradingListener +from zvt.trader.sim_account import SimAccountService +from zvt.trader.trader_info_api import AccountStatsReader +from zvt.trader.trader_schemas import AccountStats, Position +from zvt.utils.time_utils import to_pd_timestamp, now_pd_timestamp, to_time_str, is_same_date, date_time_by_interval + + +class Trader(object): + entity_schema: Type[TradableEntity] = None + + def __init__( + self, + entity_ids: List[str] = None, + exchanges: List[str] = None, + codes: List[str] = None, + start_timestamp: Union[str, pd.Timestamp] = None, + end_timestamp: Union[str, pd.Timestamp] = None, + provider: str = None, + level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, + trader_name: str = None, + real_time: bool = False, + kdata_use_begin_time: bool = False, + draw_result: bool = True, + rich_mode: bool = False, + adjust_type: AdjustType = None, + profit_threshold=(3, -0.3), + keep_history=False, + pre_load_days=365, + ) -> None: + assert self.entity_schema is not None + assert start_timestamp is not None + assert end_timestamp is not None + + self.logger = logging.getLogger(__name__) + + if trader_name: + self.trader_name = trader_name + else: + self.trader_name = type(self).__name__.lower() + + self.entity_ids = entity_ids + self.exchanges = exchanges + self.codes = codes + self.provider = provider + # make sure the min 
level factor correspond to the provider and level + self.level = IntervalLevel(level) + self.real_time = real_time + self.start_timestamp = to_pd_timestamp(start_timestamp) + self.end_timestamp = to_pd_timestamp(end_timestamp) + self.pre_load_days = pre_load_days + + self.trading_dates = self.entity_schema.get_trading_dates( + start_date=self.start_timestamp, end_date=self.end_timestamp + ) + + if real_time: + self.logger.info( + "real_time mode, end_timestamp should be future,you could set it big enough for running forever" + ) + assert self.end_timestamp >= now_pd_timestamp() + + # false: 收到k线时,该k线已完成 + # true: 收到k线时,该k线可能未完成 + self.kdata_use_begin_time = kdata_use_begin_time + self.draw_result = draw_result + self.rich_mode = rich_mode + + self.adjust_type = AdjustType(adjust_type) + self.profit_threshold = profit_threshold + self.keep_history = keep_history + + self.level_map_long_targets = {} + self.level_map_short_targets = {} + self.trading_signals: List[TradingSignal] = [] + self.trading_signal_listeners: List[TradingListener] = [] + + self.account_service = SimAccountService( + entity_schema=self.entity_schema, + trader_name=self.trader_name, + timestamp=self.start_timestamp, + provider=self.provider, + level=self.level, + rich_mode=self.rich_mode, + adjust_type=self.adjust_type, + keep_history=self.keep_history, + ) + + self.register_trading_signal_listener(self.account_service) + + self.factors = self.init_factors( + entity_ids=self.entity_ids, + entity_schema=self.entity_schema, + exchanges=self.exchanges, + codes=self.codes, + start_timestamp=date_time_by_interval(self.start_timestamp, -self.pre_load_days), + end_timestamp=self.end_timestamp, + adjust_type=self.adjust_type, + ) + + if self.factors: + self.trading_level_asc = list(set([IntervalLevel(factor.level) for factor in self.factors])) + self.trading_level_asc.sort() + + self.logger.info(f"trader level:{self.level},factors level:{self.trading_level_asc}") + + if self.level != 
self.trading_level_asc[0]: + raise Exception("trader level should be the min of the factors") + + self.trading_level_desc = list(self.trading_level_asc) + self.trading_level_desc.reverse() + else: + self.trading_level_asc = [self.level] + self.trading_level_desc = [self.level] + self.on_init() + + def on_init(self): + self.logger.info(f"trader:{self.trader_name} on_start") + + def init_entities(self, timestamp): + """ + init the entities for timestamp + + :param timestamp: + :return: + """ + self.logger.info(f"timestamp: {timestamp} init_entities") + return self.entity_ids + + def init_factors( + self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, adjust_type=None + ): + """ + overwrite it to init factors if you want to use factor computing model + :param adjust_type: + + """ + return [] + + def update_targets_by_level( + self, + level: IntervalLevel, + long_targets: List[str], + short_targets: List[str], + ) -> None: + """ + the trading signals is generated in min level,before that,we should cache targets of all levels + + :param level: + :param long_targets: + :param short_targets: + """ + self.logger.debug( + f"level:{level},old long targets:{self.level_map_long_targets.get(level)},new long targets:{long_targets}" + ) + self.level_map_long_targets[level] = long_targets + + self.logger.debug( + f"level:{level},old short targets:{self.level_map_short_targets.get(level)},new short targets:{short_targets}" + ) + self.level_map_short_targets[level] = short_targets + + def get_long_targets_by_level(self, level: IntervalLevel) -> List[str]: + return self.level_map_long_targets.get(level) + + def get_short_targets_by_level(self, level: IntervalLevel) -> List[str]: + return self.level_map_short_targets.get(level) + + def on_targets_selected_from_levels(self, timestamp) -> Tuple[List[str], List[str]]: + """ + this method's called in every min level cycle to select targets in all levels generated by the previous cycle + the default 
def on_targets_selected_from_levels(self, timestamp) -> Tuple[List[str], List[str]]:
    """
    Combine the per-level cached targets into the final selection for this
    min-level cycle: longs must be present at EVERY level (intersection, and a
    level with no longs vetoes everything), shorts at ANY level (union).
    Override for custom merging logic.

    :param timestamp: current event time
    :return: (long targets, short targets) as sets (or None when never set)
    """
    selected_longs = None
    selected_shorts = None

    for current_level in self.trading_level_desc:
        level_longs = self.level_map_long_targets.get(current_level)
        if not level_longs:
            # a level without long candidates vetoes all long targets
            selected_longs = set()
        elif selected_longs is None:
            selected_longs = set(level_longs)
        else:
            selected_longs = selected_longs & set(level_longs)

        level_shorts = self.level_map_short_targets.get(current_level)
        if level_shorts:
            if selected_shorts is None:
                selected_shorts = set(level_shorts)
            else:
                selected_shorts = selected_shorts | set(level_shorts)

    return selected_longs, selected_shorts


def get_current_account(self) -> "AccountStats":
    """Return the account snapshot maintained by the account service."""
    return self.account_service.get_current_account()


def get_current_positions(self) -> List["Position"]:
    """Return the positions of the current account."""
    return self.get_current_account().positions


def long_position_control(self):
    """
    Decide what fraction of cash a buy round may use, based on how many
    positions are already held.
    """
    holdings = self.get_current_positions()
    if not holdings:
        # no position yet: use 20% of cash
        return 0.2
    if len(holdings) <= 10:
        # fewer than ~10 holdings: use 50% of cash
        return 0.5
    # otherwise spend everything
    return 1.0


def short_position_control(self):
    """Sell rounds always close the full available position."""
    return 1.0
def on_profit_control(self):
    """
    Take-profit / stop-loss pass over the current long positions.

    :return: (entity_ids to close long, entity_ids to close short) — the short
             side is always None in the default implementation
    """
    if not self.profit_threshold or not self.get_current_positions():
        return None, None

    take_profit = self.profit_threshold[0]
    stop_loss = self.profit_threshold[1]
    to_close_longs = []
    for position in self.get_current_positions():
        if position.available_long > 1:
            # take profit
            if position.profit_rate >= take_profit:
                to_close_longs.append(position.entity_id)
                self.logger.info(f"close profit {position.profit_rate} for {position.entity_id}")
            # stop loss
            if position.profit_rate <= stop_loss:
                to_close_longs.append(position.entity_id)
                self.logger.info(f"cut lost {position.profit_rate} for {position.entity_id}")

    return to_close_longs, None


def buy(self, timestamp, entity_ids, ignore_in_position=True):
    """
    Emit open_long signals for entity_ids, splitting the cash budget decided
    by long_position_control evenly among them.

    :param ignore_in_position: skip entities already held long
    """
    if ignore_in_position:
        held_long = {
            position.entity_id
            for position in (self.get_current_account().positions or [])
            if position is not None and position.available_long > 0
        }
        entity_ids = set(entity_ids) - held_long

    if not entity_ids:
        return

    per_target_pct = self.long_position_control() * (1.0 / len(entity_ids))
    due_timestamp = to_pd_timestamp(timestamp) + pd.Timedelta(seconds=self.level.to_second())
    for entity_id in entity_ids:
        self.trading_signals.append(
            TradingSignal(
                entity_id=entity_id,
                due_timestamp=due_timestamp,
                happen_timestamp=timestamp,
                trading_signal_type=TradingSignalType.open_long,
                trading_level=self.level,
                position_pct=per_target_pct,
            )
        )


def sell(self, timestamp, entity_ids):
    """Emit close_long signals for whichever of entity_ids are actually held."""
    # restrict to entities with an available long position
    held_long = {
        position.entity_id
        for position in (self.get_current_account().positions or [])
        if position is not None and position.available_long > 0
    }
    to_sell = held_long & set(entity_ids)
    if not to_sell:
        return

    sell_pct = self.short_position_control()
    due_timestamp = to_pd_timestamp(timestamp) + pd.Timedelta(seconds=self.level.to_second())
    for entity_id in to_sell:
        self.trading_signals.append(
            TradingSignal(
                entity_id=entity_id,
                due_timestamp=due_timestamp,
                happen_timestamp=timestamp,
                trading_signal_type=TradingSignalType.close_long,
                trading_level=self.level,
                position_pct=sell_pct,
            )
        )
def on_finish(self, timestamp):
    """Run end-of-backtest hooks and optionally draw the account curve."""
    self.on_trading_finish(timestamp)
    # show the result
    if self.draw_result:
        stats_reader = AccountStatsReader(trader_names=[self.trader_name])
        stats_df = stats_reader.data_df
        curve = NormalData(stats_df.copy()[["trader_name", "timestamp", "all_value"]], category_field="trader_name")
        Drawer(main_data=curve).draw_line(show=True)


def on_factor_targets_filtered(
    self, timestamp, level, factor: "Factor", long_targets: List[str], short_targets: List[str]
) -> Tuple[List[str], List[str]]:
    """
    overwrite it to filter the targets from factor

    :param timestamp: the event time
    :param level: the level
    :param factor: the factor
    :param long_targets: the long targets from the factor
    :param short_targets: the short targets from the factor
    :return: filtered long targets, filtered short targets
    """
    self.logger.info(f"on_targets_filtered {level} long:{long_targets}")

    # default policy: cap longs at the first 10 candidates
    if len(long_targets) > 10:
        long_targets = long_targets[0:10]
    self.logger.info(f"on_targets_filtered {level} filtered long:{long_targets}")

    return long_targets, short_targets


def in_trading_date(self, timestamp):
    """Whether timestamp falls on one of the precomputed trading dates."""
    return to_time_str(timestamp) in self.trading_dates


def on_time(self, timestamp: "pd.Timestamp"):
    """
    called in every min level cycle

    :param timestamp: event time
    """
    self.logger.debug(f"current timestamp:{timestamp}")


def on_trading_signals(self, trading_signals: List["TradingSignal"]):
    """Fan signals out to all listeners, then clear the pending list."""
    for listener in self.trading_signal_listeners:
        listener.on_trading_signals(trading_signals)
    # clear only after every listener has handled them
    self.trading_signals = []


def on_trading_open(self, timestamp):
    """Notify listeners that the trading day/session opened."""
    for listener in self.trading_signal_listeners:
        listener.on_trading_open(timestamp)


def on_trading_close(self, timestamp):
    """Notify listeners that the trading day/session closed."""
    for listener in self.trading_signal_listeners:
        listener.on_trading_close(timestamp)


def on_trading_finish(self, timestamp):
    """Notify listeners that the whole run finished."""
    for listener in self.trading_signal_listeners:
        listener.on_trading_finish(timestamp)


def on_trading_error(self, timestamp, error):
    """Forward a trading error to all listeners."""
    for listener in self.trading_signal_listeners:
        listener.on_trading_error(timestamp, error)


def on_non_trading_day(self, timestamp):
    """Hook invoked for timestamps that are not trading days."""
    self.logger.info(f"on_non_trading_day: {timestamp}")
def get_factors_by_level(self, level):
    """Return the factors registered at exactly the given level."""
    return [factor for factor in self.factors if factor.level == level]


def handle_factor_targets(self, timestamp: "pd.Timestamp"):
    """
    select targets from factors
    :param timestamp: the timestamp for next kdata coming
    """
    # Factor computation over multi-entity history is fast and multi-level
    # computation is convenient, so factors are typically used as a coarse
    # whole-market filter.
    # Finer control can be added in on_factor_targets_filtered, and custom
    # logic in on_time can cooperate with the filtering.
    # Key points of the multi-level traversal:
    # 1) compute each level's targets, filter them through
    #    on_factor_targets_filtered, and cache them in
    #    level_map_long_targets / level_map_short_targets
    # 2) at the smallest level, on_targets_selected_from_levels combines the
    #    cached targets of all levels into the final selection
    # Note: the small level sees the big level's targets from the PREVIOUS
    # cycle, which is the intended behaviour.
    for level in self.trading_level_asc:
        self.logger.info(f"level: {level}")
        # in every cycle, all level factor do its job in its time
        if self.entity_schema.is_finished_kdata_timestamp(timestamp=timestamp, level=level):
            all_long_targets = []
            all_short_targets = []

            # filter targets produced by this level's factors
            current_level_factors = self.get_factors_by_level(level=level)
            for factor in current_level_factors:
                long_targets = factor.get_targets(timestamp=timestamp, target_type=TargetType.positive)
                short_targets = factor.get_targets(timestamp=timestamp, target_type=TargetType.negative)

                if long_targets or short_targets:
                    long_targets, short_targets = self.on_factor_targets_filtered(
                        timestamp=timestamp,
                        level=level,
                        factor=factor,
                        long_targets=long_targets,
                        short_targets=short_targets,
                    )

                if long_targets:
                    all_long_targets += long_targets
                if short_targets:
                    all_short_targets += short_targets

            # cache each level's targets in level_map_long_targets / level_map_short_targets
            self.update_targets_by_level(level, all_long_targets, all_short_targets)


def run(self):
    """
    Main event loop: iterate min-level kdata timestamps, open/close the
    trading session, dispatch pending signals, advance factors, select
    targets, and emit buy/sell signals for the NEXT cycle.
    """
    # iterate timestamp of the min level,e.g,9:30,9:35,9.40...for 5min level
    # timestamp represents the timestamp in kdata
    for timestamp in self.entity_schema.get_interval_timestamps(
        start_date=self.start_timestamp, end_date=self.end_timestamp, level=self.level
    ):
        self.logger.info(f">>>>>>>>>>")

        self.entity_ids = self.init_entities(timestamp=timestamp)
        self.logger.info(f"current entities: {self.entity_ids}")

        if not self.in_trading_date(timestamp=timestamp):
            self.on_non_trading_day(timestamp=timestamp)
            continue

        # on_trading_open to set the account
        if self.level >= IntervalLevel.LEVEL_1DAY or (
            self.level != IntervalLevel.LEVEL_1DAY and self.entity_schema.is_open_timestamp(timestamp)
        ):
            self.on_trading_open(timestamp=timestamp)

        # the signals were generated by previous timestamp kdata
        if self.trading_signals:
            self.logger.info("current signals:")
            for signal in self.trading_signals:
                self.logger.info(str(signal))
            self.on_trading_signals(self.trading_signals)

        for factor in self.factors:
            factor.add_entities(entity_ids=self.entity_ids)

        waiting_seconds = 0

        if self.level == IntervalLevel.LEVEL_1DAY:
            # live daily mode: wait until after 19:00 for the bar to be final
            if is_same_date(timestamp, now_pd_timestamp()):
                while True:
                    self.logger.info(f"time is:{now_pd_timestamp()},just smoke for minutes")
                    time.sleep(600)
                    current = now_pd_timestamp()
                    if current.hour >= 19:
                        waiting_seconds = 20
                        break

        elif self.real_time:
            # all factor move on to handle the coming data
            if self.kdata_use_begin_time:
                real_end_timestamp = timestamp + pd.Timedelta(seconds=self.level.to_second())
            else:
                real_end_timestamp = timestamp

            seconds = (now_pd_timestamp() - real_end_timestamp).total_seconds()
            waiting_seconds = self.level.to_second() - seconds

        # meaning the future kdata not ready yet,we could move on to check
        if waiting_seconds > 0:
            # iterate the factor from min to max which in finished timestamp kdata
            for level in self.trading_level_asc:
                if self.entity_schema.is_finished_kdata_timestamp(timestamp=timestamp, level=level):
                    factors = self.get_factors_by_level(level=level)
                    for factor in factors:
                        factor.move_on(to_timestamp=timestamp, timeout=waiting_seconds + 20)

        if self.factors:
            self.handle_factor_targets(timestamp=timestamp)

        self.on_time(timestamp=timestamp)

        long_selected, short_selected = self.on_targets_selected_from_levels(timestamp)

        # take-profit / stop-loss: force-close flagged positions
        passive_short, _ = self.on_profit_control()
        if passive_short:
            if not short_selected:
                short_selected = passive_short
            else:
                short_selected = list(set(short_selected) | set(passive_short))

        if short_selected:
            self.sell(timestamp=timestamp, entity_ids=short_selected)
        if long_selected:
            self.buy(timestamp=timestamp, entity_ids=long_selected)

        # on_trading_close to calculate date account
        if self.level >= IntervalLevel.LEVEL_1DAY or (
            self.level != IntervalLevel.LEVEL_1DAY and self.entity_schema.is_close_timestamp(timestamp)
        ):
            self.on_trading_close(timestamp)

        self.logger.info(f"<<<<<<<<<<\n")

    self.on_finish(timestamp)


def register_trading_signal_listener(self, listener):
    """Add a TradingListener (idempotent)."""
    if listener not in self.trading_signal_listeners:
        self.trading_signal_listeners.append(listener)


def deregister_trading_signal_listener(self, listener):
    """Remove a TradingListener if present."""
    if listener in self.trading_signal_listeners:
        self.trading_signal_listeners.remove(listener)


class StockTrader(Trader):
    """Trader specialised for Stock entities, defaulting to hfq price adjust."""

    entity_schema = Stock

    def __init__(
        self,
        entity_ids: List[str] = None,
        exchanges: List[str] = None,
        codes: List[str] = None,
        start_timestamp: Union[str, pd.Timestamp] = None,
        end_timestamp: Union[str, pd.Timestamp] = None,
        provider: str = None,
        level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY,
        trader_name: str = None,
        real_time: bool = False,
        kdata_use_begin_time: bool = False,
        draw_result: bool = True,
        rich_mode: bool = False,
        adjust_type: AdjustType = AdjustType.hfq,
        profit_threshold=(3, -0.3),
        keep_history=False,
    ) -> None:
        super().__init__(
            entity_ids,
            exchanges,
            codes,
            start_timestamp,
            end_timestamp,
            provider,
            level,
            trader_name,
            real_time,
            kdata_use_begin_time,
            draw_result,
            rich_mode,
            adjust_type,
            profit_threshold,
            keep_history,
        )


# the __all__ is generated
__all__ = ["Trader", "StockTrader"]
"StockTrader"] diff --git a/src/zvt/trader/trader_info_api.py b/src/zvt/trader/trader_info_api.py new file mode 100644 index 00000000..94b996bc --- /dev/null +++ b/src/zvt/trader/trader_info_api.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +from typing import List, Union + +import pandas as pd + +from zvt.contract import IntervalLevel +from zvt.contract.api import get_data, get_db_session +from zvt.contract.drawer import Drawer +from zvt.contract.normal_data import NormalData +from zvt.contract.reader import DataReader +from zvt.trader.trader_schemas import AccountStats, Order, TraderInfo, Position + + +def clear_trader(trader_name, session=None): + if not session: + session = get_db_session("zvt", data_schema=TraderInfo) + session.query(TraderInfo).filter(TraderInfo.trader_name == trader_name).delete() + session.query(AccountStats).filter(AccountStats.trader_name == trader_name).delete() + session.query(Position).filter(Position.trader_name == trader_name).delete() + session.query(Order).filter(Order.trader_name == trader_name).delete() + session.commit() + + +def get_trader_info( + trader_name=None, + return_type="df", + start_timestamp=None, + end_timestamp=None, + filters=None, + session=None, + order=None, + limit=None, +) -> List[TraderInfo]: + if trader_name: + if filters: + filters = filters + [TraderInfo.trader_name == trader_name] + else: + filters = [TraderInfo.trader_name == trader_name] + + return get_data( + data_schema=TraderInfo, + entity_id=None, + codes=None, + level=None, + provider="zvt", + columns=None, + return_type=return_type, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + filters=filters, + session=session, + order=order, + limit=limit, + ) + + +def get_order_securities(trader_name): + items = ( + get_db_session(provider="zvt", data_schema=Order) + .query(Order.entity_id) + .filter(Order.trader_name == trader_name) + .group_by(Order.entity_id) + .all() + ) + + return [item[0] for item in items] + + +class 
class AccountStatsReader(DataReader):
    """DataReader over AccountStats rows, categorised by trader_name."""

    def __init__(
        self,
        start_timestamp: Union[str, pd.Timestamp] = None,
        end_timestamp: Union[str, pd.Timestamp] = None,
        columns: List = None,
        filters: List = None,
        order: object = None,
        level: IntervalLevel = IntervalLevel.LEVEL_1DAY,
        trader_names: List[str] = None,
    ) -> None:
        self.trader_names = trader_names
        self.filters = filters

        if self.trader_names:
            # renamed from `filter` to avoid shadowing the builtin
            name_filters = [AccountStats.trader_name == name for name in self.trader_names]
            if self.filters:
                self.filters += name_filters
            else:
                self.filters = name_filters
        super().__init__(
            AccountStats,
            None,
            None,
            None,
            None,
            None,
            None,
            start_timestamp,
            end_timestamp,
            columns,
            self.filters,
            order,
            None,
            level,
            category_field="trader_name",
            time_field="timestamp",
            keep_window=None,
        )

    def draw_line(self, show=True):
        """Draw the all_value curve, one line per trader."""
        curve = NormalData(
            self.data_df.copy()[["trader_name", "timestamp", "all_value"]], category_field="trader_name"
        )
        return Drawer(main_data=curve).draw_line(show=show)


class OrderReader(DataReader):
    """DataReader over Order rows, categorised by trader_name."""

    def __init__(
        self,
        start_timestamp: Union[str, pd.Timestamp] = None,
        end_timestamp: Union[str, pd.Timestamp] = None,
        columns: List = None,
        filters: List = None,
        order: object = None,
        level: IntervalLevel = None,
        trader_names: List[str] = None,
    ) -> None:
        self.trader_names = trader_names
        self.filters = filters

        if self.trader_names:
            name_filters = [Order.trader_name == name for name in self.trader_names]
            if self.filters:
                self.filters += name_filters
            else:
                self.filters = name_filters

        super().__init__(
            Order,
            None,
            None,
            None,
            None,
            None,
            None,
            start_timestamp,
            end_timestamp,
            columns,
            self.filters,
            order,
            None,
            level,
            category_field="trader_name",
            time_field="timestamp",
            keep_window=None,
        )
# -*- coding: utf-8 -*-
from typing import List

from zvt.contract.model import MixinModel


class PositionModel(MixinModel):
    """API model mirroring the Position schema (one entity's holding)."""

    #: trader (bot) name
    trader_name: str
    #: long amount
    long_amount: float
    #: closeable long amount
    available_long: float
    #: average long price
    average_long_price: float
    #: short amount
    short_amount: float
    #: closeable short amount
    available_short: float
    #: average short price
    average_short_price: float
    #: profit/loss
    profit: float
    #: profit rate
    profit_rate: float
    #: market value, or the occupied margin (always 100% for simplicity)
    value: float
    #: trading rule (0 means T+0, 1 means T+1)
    trading_t: int


class AccountStatsModel(MixinModel):
    """API model mirroring the AccountStats schema (daily account snapshot)."""

    #: input money
    input_money: float
    #: trader (bot) name
    trader_name: str
    #: positions detail
    positions: List[PositionModel]
    #: market value
    value: float
    #: available cash
    cash: float
    #: value + cash
    all_value: float

    #: profit/loss
    profit: float
    #: profit rate
    profit_rate: float

    #: computed at close
    closing: bool
@to_string
class AccountStats(TraderBase, Mixin):
    """
    account stats of every day
    """

    __tablename__ = "account_stats"

    input_money = Column(Float)
    #: trader (bot) name
    trader_name = Column(String(length=128))
    #: available cash
    cash = Column(Float)
    #: positions detail
    positions = relationship("Position", back_populates="account_stats")
    #: market value
    value = Column(Float)
    #: value + cash
    all_value = Column(Float)

    #: profit/loss
    profit = Column(Float)
    #: profit rate
    profit_rate = Column(Float)

    #: computed at close
    closing = Column(Boolean)
+ #: 交易类型(0代表T+0,1代表T+1) trading_t = Column(Integer) -# 委托单 +#: 委托单 class Order(TraderBase, Mixin): - __tablename__ = 'order' + __tablename__ = "order" - # 机器人名字 + #: 机器人名字 trader_name = Column(String(length=128)) - # 订单价格 + #: 订单价格 order_price = Column(Float) - # 订单数量 + #: 订单数量 order_amount = Column(Float) - # 订单类型 + #: 订单类型 order_type = Column(String(length=64)) - # 订单状态 + #: 订单状态 status = Column(String(length=64)) - # 产生订单的selector/factor level + #: 产生订单的selector/factor level level = Column(String(length=32)) -register_schema(providers=['zvt'], db_name='trader_info', schema_base=TraderBase) +register_schema(providers=["zvt"], db_name="trader_info", schema_base=TraderBase) # the __all__ is generated -__all__ = ['TraderInfo', 'AccountStats', 'Position', 'Order'] +__all__ = ["TraderInfo", "AccountStats", "Position", "Order"] diff --git a/src/zvt/trading/__init__.py b/src/zvt/trading/__init__.py new file mode 100644 index 00000000..2686fff0 --- /dev/null +++ b/src/zvt/trading/__init__.py @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*- + + +# the __all__ is generated +__all__ = [] diff --git a/src/zvt/trading/common.py b/src/zvt/trading/common.py new file mode 100644 index 00000000..c5e88439 --- /dev/null +++ b/src/zvt/trading/common.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +from enum import Enum + + +class ExecutionStatus(Enum): + init = "init" + success = "success" + failed = "failed" + + +# the __all__ is generated +__all__ = ["ExecutionStatus"] diff --git a/src/zvt/trading/trading_models.py b/src/zvt/trading/trading_models.py new file mode 100644 index 00000000..43a93976 --- /dev/null +++ b/src/zvt/trading/trading_models.py @@ -0,0 +1,230 @@ +# -*- coding: utf-8 -*- +from datetime import datetime +from typing import List, Optional +from typing import Union + +from pydantic import BaseModel, Field +from pydantic import field_validator + +from zvt.common.query_models import TimeRange, OrderByType +from zvt.contract import IntervalLevel, AdjustType +from 
zvt.contract.model import MixinModel, CustomModel +from zvt.tag.tag_utils import get_stock_pool_names +from zvt.trader import TradingSignalType +from zvt.trading.common import ExecutionStatus +from zvt.utils.time_utils import date_time_by_interval, current_date +from zvt.utils.time_utils import tomorrow_date, to_pd_timestamp + + +class KdataRequestModel(BaseModel): + entity_ids: List[str] + data_provider: str = Field(default="qmt") + start_timestamp: datetime = Field(default=date_time_by_interval(current_date(), -500)) + end_timestamp: Optional[datetime] = Field(default=None) + level: IntervalLevel = Field(default=IntervalLevel.LEVEL_1DAY) + adjust_type: AdjustType = Field(default=AdjustType.qfq) + + +class KdataModel(BaseModel): + entity_id: str + code: str + name: str + level: IntervalLevel = Field(default=IntervalLevel.LEVEL_1DAY) + datas: List + + +class TSRequestModel(BaseModel): + entity_ids: List[str] + data_provider: str = Field(default="qmt") + days_count: int = Field(default=5) + + +class TSModel(BaseModel): + entity_id: str + code: str + name: str + datas: List + + +class QuoteStatsModel(BaseModel): + #: UNIX时间戳 + time: int + #: 涨停数 + limit_up_count: int + #: 跌停数 + limit_down_count: int + #: 上涨数 + up_count: int + #: 下跌数 + down_count: int + #: 涨幅 + change_pct: float + #: 成交额 + turnover: float + #: 昨日成交额 + pre_turnover: Optional[float] = Field(default=None) + #: 同比 + turnover_change: Optional[float] = Field(default=None) + + +class QueryStockQuoteSettingModel(CustomModel): + stock_pool_name: Optional[str] = Field(default=None) + main_tags: Optional[List[str]] = Field(default=None) + + +class BuildQueryStockQuoteSettingModel(CustomModel): + stock_pool_name: str + main_tags: Optional[List[str]] = Field(default=None) + + @field_validator("stock_pool_name") + @classmethod + def stock_pool_name_existed(cls, v: str) -> str: + if v not in get_stock_pool_names(): + raise ValueError(f"Invalid stock_pool_name: {v}") + return v + + +class 
QueryTagQuoteModel(CustomModel): + stock_pool_name: str + main_tags: List[str] + + +class QueryStockQuoteModel(CustomModel): + + main_tag: Optional[str] = Field(default=None) + entity_ids: Optional[List[str]] = Field(default=None) + stock_pool_name: Optional[str] = Field(default=None) + # the amount is not huge, just ignore now + limit: int = Field(default=100) + order_by_type: Optional[OrderByType] = Field(default=OrderByType.desc) + order_by_field: Optional[str] = Field(default="change_pct") + + +class StockQuoteModel(MixinModel): + #: 代码 + code: str + #: 名字 + name: str + + #: UNIX时间戳 + time: int + #: 最新价 + price: float + # 涨跌幅 + change_pct: float + # 成交金额 + turnover: float + # 换手率 + turnover_rate: float + #: 是否涨停 + is_limit_up: bool + #: 封涨停金额 + limit_up_amount: Optional[float] = Field(default=None) + #: 是否跌停 + is_limit_down: bool + #: 封跌停金额 + limit_down_amount: Optional[float] = Field(default=None) + #: 5挡卖单金额 + ask_amount: float + #: 5挡买单金额 + bid_amount: float + #: 流通市值 + float_cap: float + #: 总市值 + total_cap: float + + main_tag: Optional[str] = Field(default=None) + sub_tag: Union[str, None] = Field(default=None) + hidden_tags: Union[List[str], None] = Field(default=None) + + +class TagQuoteStatsModel(CustomModel): + main_tag: str + #: 涨停数 + limit_up_count: int + #: 跌停数 + limit_down_count: int + #: 上涨数 + up_count: int + #: 下跌数 + down_count: int + #: 涨幅 + change_pct: float + #: 成交额 + turnover: float + + +class StockQuoteStatsModel(CustomModel): + #: 涨停数 + limit_up_count: int + #: 跌停数 + limit_down_count: int + #: 上涨数 + up_count: int + #: 下跌数 + down_count: int + #: 涨幅 + change_pct: float + #: 成交额 + turnover: float + + quotes: List[StockQuoteModel] + + +class TradingPlanModel(MixinModel): + stock_id: str + stock_code: str + stock_name: str + # 执行交易日 + trading_date: datetime + # 预期开盘涨跌幅 + expected_open_pct: float + buy_price: Optional[float] + sell_price: Optional[float] + # 操作理由 + trading_reason: str + # 交易信号 + trading_signal_type: TradingSignalType + # 执行状态 + 
status: ExecutionStatus = Field(default=ExecutionStatus.init) + # 复盘 + review: Optional[str] + + +class BuildTradingPlanModel(BaseModel): + stock_id: str + # 执行交易日 + trading_date: datetime + # 预期开盘涨跌幅 + expected_open_pct: float + buy_price: Optional[float] + sell_price: Optional[float] + # 操作理由 + trading_reason: str + # 交易信号 + trading_signal_type: TradingSignalType + + @field_validator("trading_date") + @classmethod + def trading_date_must_be_future(cls, v: str) -> str: + if to_pd_timestamp(v) < tomorrow_date(): + raise ValueError(f"trading_date: {v} must set to future trading date") + return v + + +class QueryTradingPlanModel(BaseModel): + time_range: TimeRange + + +# the __all__ is generated +__all__ = [ + "QueryTagQuoteModel", + "QueryStockQuoteSettingModel", + "BuildQueryStockQuoteSettingModel", + "QueryStockQuoteModel", + "StockQuoteModel", + "StockQuoteStatsModel", + "TradingPlanModel", + "BuildTradingPlanModel", + "QueryTradingPlanModel", +] diff --git a/src/zvt/trading/trading_schemas.py b/src/zvt/trading/trading_schemas.py new file mode 100644 index 00000000..d4f97b94 --- /dev/null +++ b/src/zvt/trading/trading_schemas.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +from sqlalchemy import Column, Float, DateTime +from sqlalchemy import String, JSON +from sqlalchemy.orm import declarative_base + +from zvt.contract import Mixin +from zvt.contract.register import register_schema + +TradingBase = declarative_base() + + +class TradingPlan(TradingBase, Mixin): + __tablename__ = "trading_plan" + stock_id = Column(String) + stock_code = Column(String) + stock_name = Column(String) + trading_date = Column(DateTime) + # 预期开盘涨跌幅 + expected_open_pct = Column(Float, nullable=False) + buy_price = Column(Float) + sell_price = Column(Float) + # 操作理由 + trading_reason = Column(String) + # 交易信号 + trading_signal_type = Column(String) + # 执行状态 + status = Column(String) + # 复盘 + review = Column(String) + + +class QueryStockQuoteSetting(TradingBase, Mixin): + __tablename__ = 
"query_stock_quote_setting" + stock_pool_name = Column(String) + main_tags = Column(JSON) + + +register_schema(providers=["zvt"], db_name="stock_trading", schema_base=TradingBase) + + +# the __all__ is generated +__all__ = ["TradingPlan", "QueryStockQuoteSetting"] diff --git a/src/zvt/trading/trading_service.py b/src/zvt/trading/trading_service.py new file mode 100644 index 00000000..61b09413 --- /dev/null +++ b/src/zvt/trading/trading_service.py @@ -0,0 +1,393 @@ +# -*- coding: utf-8 -*- +import logging +from typing import List + +import pandas as pd +from fastapi_pagination.ext.sqlalchemy import paginate + +import zvt.api.kdata as kdata_api +import zvt.contract.api as contract_api +from zvt.common.query_models import TimeUnit +from zvt.domain import Stock, StockQuote, Stock1mQuote +from zvt.tag.tag_schemas import StockTags, StockPools +from zvt.trading.common import ExecutionStatus +from zvt.trading.trading_models import ( + BuildTradingPlanModel, + QueryTradingPlanModel, + QueryTagQuoteModel, + QueryStockQuoteModel, + BuildQueryStockQuoteSettingModel, + KdataRequestModel, + TSRequestModel, +) +from zvt.trading.trading_schemas import TradingPlan, QueryStockQuoteSetting +from zvt.utils.pd_utils import pd_is_not_null +from zvt.utils.time_utils import ( + to_time_str, + to_pd_timestamp, + now_pd_timestamp, + date_time_by_interval, + current_date, + date_and_time, +) + +logger = logging.getLogger(__name__) + + +def query_kdata(kdata_request_model: KdataRequestModel): + kdata_df = kdata_api.get_kdata( + entity_ids=kdata_request_model.entity_ids, + provider=kdata_request_model.data_provider, + start_timestamp=kdata_request_model.start_timestamp, + end_timestamp=kdata_request_model.end_timestamp, + adjust_type=kdata_request_model.adjust_type, + ) + if pd_is_not_null(kdata_df): + kdata_df["timestamp"] = kdata_df["timestamp"].apply(lambda x: int(x.timestamp())) + kdata_df["data"] = kdata_df.apply( + lambda x: x[ + ["timestamp", "open", "high", "low", "close", "volume", 
"turnover", "change_pct", "turnover_rate"] + ].values.tolist(), + axis=1, + ) + df = kdata_df.groupby("entity_id").agg( + code=("code", "first"), + name=("name", "first"), + level=("level", "first"), + datas=("data", lambda data: list(data)), + ) + df = df.reset_index(drop=False) + return df.to_dict(orient="records") + + +def query_ts(ts_request_model: TSRequestModel): + trading_dates = kdata_api.get_recent_trade_dates(days_count=ts_request_model.days_count) + ts_df = Stock1mQuote.query_data( + entity_ids=ts_request_model.entity_ids, + provider=ts_request_model.data_provider, + start_timestamp=trading_dates[0], + ) + if pd_is_not_null(ts_df): + ts_df["data"] = ts_df.apply( + lambda x: x[ + ["time", "price", "avg_price", "change_pct", "volume", "turnover", "turnover_rate"] + ].values.tolist(), + axis=1, + ) + df = ts_df.groupby("entity_id").agg( + code=("code", "first"), + name=("name", "first"), + datas=("data", lambda data: list(data)), + ) + df = df.reset_index(drop=False) + return df.to_dict(orient="records") + + +def build_trading_plan(build_trading_plan_model: BuildTradingPlanModel): + with contract_api.DBSession(provider="zvt", data_schema=TradingPlan)() as session: + stock_id = build_trading_plan_model.stock_id + trading_date_str = to_time_str(build_trading_plan_model.trading_date) + trading_date = to_pd_timestamp(trading_date_str) + signal = build_trading_plan_model.trading_signal_type.value + plan_id = f"{stock_id}_{trading_date_str}_{signal}" + + datas = TradingPlan.query_data( + session=session, filters=[TradingPlan.id == plan_id], limit=1, return_type="domain" + ) + if datas: + assert len(datas) == 1 + plan = datas[0] + else: + datas = Stock.query_data(provider="em", entity_id=stock_id, return_type="domain") + stock = datas[0] + plan = TradingPlan( + id=plan_id, + entity_id=stock_id, + stock_id=stock_id, + stock_code=stock.code, + stock_name=stock.name, + trading_date=trading_date, + expected_open_pct=build_trading_plan_model.expected_open_pct, + 
buy_price=build_trading_plan_model.buy_price, + sell_price=build_trading_plan_model.sell_price, + trading_reason=build_trading_plan_model.trading_reason, + trading_signal_type=signal, + status=ExecutionStatus.init.value, + ) + plan.timestamp = now_pd_timestamp() + session.add(plan) + session.commit() + session.refresh(plan) + return plan + + +def query_trading_plan(query_trading_plan_model: QueryTradingPlanModel): + with contract_api.DBSession(provider="zvt", data_schema=TradingPlan)() as session: + time_range = query_trading_plan_model.time_range + if time_range.relative_time_range: + start_timestamp = date_time_by_interval( + current_date(), time_range.relative_time_range.interval, time_range.relative_time_range.time_unit + ) + end_timestamp = None + else: + start_timestamp = time_range.absolute_time_range.start_timestamp + end_timestamp = time_range.absolute_time_range.end_timestamp + selectable = TradingPlan.query_data( + session=session, start_timestamp=start_timestamp, end_timestamp=end_timestamp, return_type="select" + ) + return paginate(session, selectable) + + +def get_current_trading_plan(): + with contract_api.DBSession(provider="zvt", data_schema=TradingPlan)() as session: + return TradingPlan.query_data( + session=session, + filters=[TradingPlan.status == ExecutionStatus.pending.value], + order=TradingPlan.trading_date.asc(), + return_type="domain", + ) + + +def get_future_trading_plan(): + with contract_api.DBSession(provider="zvt", data_schema=TradingPlan)() as session: + return TradingPlan.query_data( + session=session, + filters=[TradingPlan.status == ExecutionStatus.init.value], + order=TradingPlan.trading_date.asc(), + return_type="domain", + ) + + +def check_trading_plan(): + with contract_api.DBSession(provider="zvt", data_schema=TradingPlan)() as session: + plans = TradingPlan.query_data( + session=session, + filters=[TradingPlan.status == ExecutionStatus.init.value, TradingPlan.trading_date == current_date()], + 
order=TradingPlan.trading_date.asc(), + return_type="domain", + ) + + logger.debug(f"current plans:{plans}") + + +def query_quote_stats(): + quote_df = StockQuote.query_data( + return_type="df", + filters=[StockQuote.change_pct >= -0.31, StockQuote.change_pct <= 0.31], + columns=["timestamp", "entity_id", "time", "change_pct", "turnover", "is_limit_up", "is_limit_down"], + ) + current_stats = cal_quote_stats(quote_df) + start_timestamp = current_stats["timestamp"] + + pre_date_df = Stock1mQuote.query_data( + filters=[Stock1mQuote.timestamp < to_time_str(start_timestamp)], + order=Stock1mQuote.timestamp.desc(), + limit=1, + columns=["timestamp"], + ) + pre_date = pre_date_df["timestamp"].tolist()[0] + + if start_timestamp.hour >= 15: + start_timestamp = date_and_time(pre_date, "15:00") + else: + start_timestamp = date_and_time(pre_date, f"{start_timestamp.hour}:{start_timestamp.minute}") + end_timestamp = date_time_by_interval(start_timestamp, 1, TimeUnit.minute) + + pre_df = Stock1mQuote.query_data( + return_type="df", + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + filters=[Stock1mQuote.change_pct >= -0.31, Stock1mQuote.change_pct <= 0.31], + columns=["timestamp", "entity_id", "time", "change_pct", "turnover", "is_limit_up", "is_limit_down"], + ) + + if pd_is_not_null(pre_df): + pre_stats = cal_quote_stats(pre_df) + current_stats["pre_turnover"] = pre_stats["turnover"] + current_stats["turnover_change"] = current_stats["turnover"] - current_stats["pre_turnover"] + return current_stats + + +def cal_quote_stats(quote_df): + quote_df["ss"] = 1 + + df = ( + quote_df.groupby("ss") + .agg( + timestamp=("timestamp", "last"), + time=("time", "last"), + up_count=("change_pct", lambda x: (x > 0).sum()), + down_count=("change_pct", lambda x: (x <= 0).sum()), + turnover=("turnover", "sum"), + change_pct=("change_pct", "mean"), + limit_up_count=("is_limit_up", "sum"), + limit_down_count=("is_limit_down", lambda x: (x == True).sum()), + ) + 
.reset_index(drop=True) + ) + + return df.to_dict(orient="records")[0] + + +def query_tag_quotes(query_tag_quote_model: QueryTagQuoteModel): + stock_pools: List[StockPools] = StockPools.query_data( + filters=[StockPools.stock_pool_name == query_tag_quote_model.stock_pool_name], + order=StockPools.timestamp.desc(), + limit=1, + return_type="domain", + ) + if stock_pools: + entity_ids = stock_pools[0].entity_ids + else: + entity_ids = None + + tag_df = StockTags.query_data( + entity_ids=entity_ids, + filters=[StockTags.main_tag.in_(query_tag_quote_model.main_tags)], + columns=[StockTags.entity_id, StockTags.main_tag], + return_type="df", + index="entity_id", + ) + + entity_ids = tag_df["entity_id"].tolist() + + quote_df = StockQuote.query_data(entity_ids=entity_ids, return_type="df", index="entity_id") + + df = pd.concat([tag_df, quote_df], axis=1) + grouped_df = ( + df.groupby("main_tag") + .agg( + up_count=("change_pct", lambda x: (x > 0).sum()), + down_count=("change_pct", lambda x: (x <= 0).sum()), + turnover=("turnover", "sum"), + change_pct=("change_pct", "mean"), + limit_up_count=("is_limit_up", "sum"), + limit_down_count=("is_limit_down", lambda x: (x == True).sum()), + total_count=("main_tag", "size"), # 添加计数,计算每个分组的总行数 + ) + .reset_index(drop=False) + ) + sorted_df = grouped_df.sort_values(by=["turnover", "total_count"], ascending=[False, False]) + + return sorted_df.to_dict(orient="records") + + +def query_stock_quotes(query_stock_quote_model: QueryStockQuoteModel): + entity_ids = None + if query_stock_quote_model.stock_pool_name: + stock_pools: List[StockPools] = StockPools.query_data( + filters=[StockPools.stock_pool_name == query_stock_quote_model.stock_pool_name], + order=StockPools.timestamp.desc(), + limit=1, + return_type="domain", + ) + if stock_pools: + entity_ids = stock_pools[0].entity_ids + else: + entity_ids = query_stock_quote_model.entity_ids + + if query_stock_quote_model.main_tag: + tags_dict = StockTags.query_data( + 
entity_ids=entity_ids, + filters=[StockTags.main_tag == query_stock_quote_model.main_tag], + return_type="dict", + ) + if not tags_dict: + return None + entity_ids = [item["entity_id"] for item in tags_dict] + else: + tags_dict = StockTags.query_data( + return_type="dict", + ) + + entity_tags_map = {item["entity_id"]: item for item in tags_dict} + + order = eval(f"StockQuote.{query_stock_quote_model.order_by_field}.{query_stock_quote_model.order_by_type.value}()") + + df = StockQuote.query_data(order=order, entity_ids=entity_ids, return_type="df") + + if not pd_is_not_null(df): + return None + + def set_tags(quote): + entity_id = quote["entity_id"] + main_tag = entity_tags_map.get(entity_id, {}).get("main_tag", None) + sub_tag = entity_tags_map.get(entity_id, {}).get("sub_tag", None) + active_hidden_tags = entity_tags_map.get(entity_id, {}).get("active_hidden_tags", None) + if active_hidden_tags: + hidden_tags = list(active_hidden_tags.keys()) + else: + hidden_tags = None + return pd.Series({"main_tag": main_tag, "sub_tag": sub_tag, "hidden_tags": hidden_tags}) + + df[["main_tag", "sub_tag", "hidden_tags"]] = df.apply(set_tags, axis=1) + + up_count = (df["change_pct"] > 0).sum() + down_count = (df["change_pct"] < 0).sum() + turnover = df["turnover"].sum() + change_pct = df["change_pct"].mean() + limit_up_count = df["is_limit_up"].sum() + limit_down_count = df["is_limit_down"].sum() + + quotes = df.to_dict(orient="records") + + result = { + "up_count": up_count, + "down_count": down_count, + "turnover": turnover, + "change_pct": change_pct, + "limit_up_count": limit_up_count, + "limit_down_count": limit_down_count, + "quotes": quotes[: query_stock_quote_model.limit], + } + return result + + +def buy_stocks(): + pass + + +def sell_stocks(): + pass + + +def build_query_stock_quote_setting(build_query_stock_quote_setting_model: BuildQueryStockQuoteSettingModel): + with contract_api.DBSession(provider="zvt", data_schema=QueryStockQuoteSetting)() as session: + the_id = 
"admin_setting" + datas = QueryStockQuoteSetting.query_data(ids=[the_id], session=session, return_type="domain") + if datas: + query_setting = datas[0] + else: + query_setting = QueryStockQuoteSetting(entity_id="admin", id=the_id) + query_setting.timestamp = current_date() + query_setting.stock_pool_name = build_query_stock_quote_setting_model.stock_pool_name + query_setting.main_tags = build_query_stock_quote_setting_model.main_tags + session.add(query_setting) + session.commit() + session.refresh(query_setting) + return query_setting + + +def build_default_query_stock_quote_setting(): + datas = QueryStockQuoteSetting.query_data(ids=["admin_setting"], return_type="domain") + if datas: + return + build_query_stock_quote_setting(BuildQueryStockQuoteSettingModel(stock_pool_name="all", main_tags=["消费电子"])) + + +if __name__ == "__main__": + # print(query_tag_quotes(QueryTagQuoteModel(stock_pool_name="all", main_tags=["低空经济", "半导体", "化工", "消费电子"]))) + # print(query_stock_quotes(QueryStockQuoteModel(stock_pool_name="all", main_tag="半导体"))) + print(query_quote_stats()) +# the __all__ is generated +__all__ = [ + "build_trading_plan", + "query_trading_plan", + "get_current_trading_plan", + "get_future_trading_plan", + "check_trading_plan", + "query_stock_quotes", + "buy_stocks", + "sell_stocks", + "build_query_stock_quote_setting", +] diff --git a/zvt/ui/__init__.py b/src/zvt/ui/__init__.py similarity index 58% rename from zvt/ui/__init__.py rename to src/zvt/ui/__init__.py index b6355a3d..7eeeff88 100644 --- a/zvt/ui/__init__.py +++ b/src/zvt/ui/__init__.py @@ -4,11 +4,13 @@ import dash import dash_bootstrap_components as dbc -assets_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'assets')) +assets_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "assets")) zvt_app = dash.Dash( - __name__, meta_tags=[{"name": "viewport", "content": "width=device-width"}], assets_folder=assets_path, - external_stylesheets=[dbc.themes.BOOTSTRAP] + __name__, + 
meta_tags=[{"name": "viewport", "content": "width=device-width"}], + assets_folder=assets_path, + external_stylesheets=[dbc.themes.BOOTSTRAP], ) zvt_app.config.suppress_callback_exceptions = True diff --git a/src/zvt/ui/apps/factor_app.py b/src/zvt/ui/apps/factor_app.py new file mode 100644 index 00000000..eb6a836c --- /dev/null +++ b/src/zvt/ui/apps/factor_app.py @@ -0,0 +1,341 @@ +# -*- coding: utf-8 -*- +from typing import List + +import dash_daq as daq +from dash import dash +from dash import dcc +from dash import html +from dash.dependencies import Input, Output, State + +from zvt.contract import Mixin +from zvt.contract import zvt_context, IntervalLevel +from zvt.contract.api import get_entities, get_schema_by_name, get_schema_columns +from zvt.contract.drawer import StackedDrawer +from zvt.trader.trader_info_api import AccountStatsReader, OrderReader, get_order_securities +from zvt.trader.trader_info_api import get_trader_info +from zvt.trader.trader_schemas import TraderInfo +from zvt.ui import zvt_app +from zvt.ui.components.dcc_components import get_account_stats_figure +from zvt.utils.pd_utils import pd_is_not_null + +account_readers = [] +order_readers = [] + +# init the data +traders: List[TraderInfo] = [] + +trader_names: List[str] = [] + + +def order_type_flag(order_type): + if order_type == "order_long" or order_type == "order_close_short": + return "B" + else: + return "S" + + +def order_type_color(order_type): + if order_type == "order_long" or order_type == "order_close_short": + return "#ec0000" + else: + return "#00da3c" + + +def load_traders(): + global traders + global trader_names + + traders = get_trader_info(return_type="domain") + account_readers.clear() + order_readers.clear() + for trader in traders: + account_readers.append(AccountStatsReader(level=trader.level, trader_names=[trader.trader_name])) + order_readers.append( + OrderReader(start_timestamp=trader.start_timestamp, level=trader.level, trader_names=[trader.trader_name]) + ) + + 
trader_names = [item.trader_name for item in traders] + + +load_traders() + + +def factor_layout(): + layout = html.Div( + [ + # controls + html.Div( + className="three columns card", + children=[ + html.Div( + className="bg-white user-control", + children=[ + html.Div( + className="padding-top-bot", + children=[ + html.H6("select trader:"), + dcc.Dropdown( + id="trader-selector", + placeholder="select the trader", + options=[{"label": item, "value": i} for i, item in enumerate(trader_names)], + ), + ], + ), + # select entity type + html.Div( + className="padding-top-bot", + children=[ + html.H6("select entity type:"), + dcc.Dropdown( + id="entity-type-selector", + placeholder="select entity type", + options=[ + {"label": name, "value": name} + for name in zvt_context.tradable_schema_map.keys() + ], + value="stock", + clearable=False, + ), + ], + ), + # select entity provider + html.Div( + className="padding-top-bot", + children=[ + html.H6("select entity provider:"), + dcc.Dropdown(id="entity-provider-selector", placeholder="select entity provider"), + ], + ), + # select entity + html.Div( + className="padding-top-bot", + children=[ + html.H6("select entity:"), + dcc.Dropdown(id="entity-selector", placeholder="select entity"), + ], + ), + # select levels + html.Div( + className="padding-top-bot", + children=[ + html.H6("select levels:"), + dcc.Dropdown( + id="levels-selector", + options=[ + {"label": level.name, "value": level.value} + for level in (IntervalLevel.LEVEL_1WEEK, IntervalLevel.LEVEL_1DAY) + ], + value="1d", + multi=True, + ), + ], + ), + # select factor + html.Div( + className="padding-top-bot", + children=[ + html.H6("select factor:"), + dcc.Dropdown( + id="factor-selector", + placeholder="select factor", + options=[ + {"label": name, "value": name} + for name in zvt_context.factor_cls_registry.keys() + ], + value="TechnicalFactor", + ), + ], + ), + # select data + html.Div( + children=[ + html.Div( + [ + html.H6( + "related/all data to show in sub 
graph", + style={"display": "inline-block"}, + ), + daq.BooleanSwitch( + id="data-switch", + on=True, + style={ + "display": "inline-block", + "float": "right", + "vertical-align": "middle", + "padding": "8px", + }, + ), + ], + ), + dcc.Dropdown(id="data-selector", placeholder="schema"), + ], + style={"padding-top": "12px"}, + ), + # select properties + html.Div( + children=[dcc.Dropdown(id="schema-column-selector", placeholder="properties")], + style={"padding-top": "6px"}, + ), + ], + ) + ], + ), + # Graph + html.Div( + className="nine columns card-left", + children=[ + html.Div( + id="trader-details", + className="bg-white", + ), + html.Div(id="factor-details"), + ], + ), + ] + ) + + return layout + + +@zvt_app.callback( + [ + Output("trader-details", "children"), + Output("entity-type-selector", "options"), + Output("entity-provider-selector", "options"), + Output("entity-selector", "options"), + ], + [ + Input("trader-selector", "value"), + Input("entity-type-selector", "value"), + Input("entity-provider-selector", "value"), + ], +) +def update_trader_details(trader_index, entity_type, entity_provider): + if trader_index is not None: + # change entity_type options + entity_type = traders[trader_index].entity_type + if not entity_type: + entity_type = "stock" + entity_type_options = [{"label": entity_type, "value": entity_type}] + + # account stats + account_stats = get_account_stats_figure(account_stats_reader=account_readers[trader_index]) + + providers = zvt_context.tradable_schema_map.get(entity_type).providers + entity_provider_options = [{"label": name, "value": name} for name in providers] + + # entities + entity_ids = get_order_securities(trader_name=trader_names[trader_index]) + df = get_entities( + provider=entity_provider, + entity_type=entity_type, + entity_ids=entity_ids, + columns=["entity_id", "code", "name"], + index="entity_id", + ) + entity_options = [ + {"label": f'{entity_id}({entity["name"]})', "value": entity_id} for entity_id, entity in 
df.iterrows() + ] + + return account_stats, entity_type_options, entity_provider_options, entity_options + else: + entity_type_options = [{"label": name, "value": name} for name in zvt_context.tradable_schema_map.keys()] + account_stats = None + providers = zvt_context.tradable_schema_map.get(entity_type).providers + entity_provider_options = [{"label": name, "value": name} for name in providers] + df = get_entities( + provider=entity_provider, entity_type=entity_type, columns=["entity_id", "code", "name"], index="entity_id" + ) + entity_options = [ + {"label": f'{entity_id}({entity["name"]})', "value": entity_id} for entity_id, entity in df.iterrows() + ] + return account_stats, entity_type_options, entity_provider_options, entity_options + + +@zvt_app.callback( + Output("data-selector", "options"), [Input("entity-type-selector", "value"), Input("data-switch", "on")] +) +def update_entity_selector(entity_type, related): + if entity_type is not None: + if related: + schemas = zvt_context.entity_map_schemas.get(entity_type) + else: + schemas = zvt_context.schemas + return [{"label": schema.__name__, "value": schema.__name__} for schema in schemas] + raise dash.PreventUpdate() + + +@zvt_app.callback(Output("schema-column-selector", "options"), [Input("data-selector", "value")]) +def update_column_selector(schema_name): + if schema_name: + schema = get_schema_by_name(name=schema_name) + cols = get_schema_columns(schema=schema) + + return [{"label": col, "value": col} for col in cols] + raise dash.PreventUpdate() + + +@zvt_app.callback( + Output("factor-details", "children"), + [ + Input("factor-selector", "value"), + Input("entity-type-selector", "value"), + Input("entity-selector", "value"), + Input("levels-selector", "value"), + Input("schema-column-selector", "value"), + ], + [State("trader-selector", "value"), State("data-selector", "value")], +) +def update_factor_details(factor, entity_type, entity, levels, columns, trader_index, schema_name): + if factor and 
entity_type and entity and levels: + sub_df = None + # add sub graph + if columns: + if type(columns) == str: + columns = [columns] + columns = columns + ["entity_id", "timestamp"] + schema: Mixin = get_schema_by_name(name=schema_name) + sub_df = schema.query_data(entity_id=entity, columns=columns) + + # add trading signals as annotation + annotation_df = None + if trader_index is not None: + order_reader = order_readers[trader_index] + annotation_df = order_reader.data_df.copy() + annotation_df = annotation_df[annotation_df.entity_id == entity].copy() + if pd_is_not_null(annotation_df): + annotation_df["value"] = annotation_df["order_price"] + annotation_df["flag"] = annotation_df["order_type"].apply(lambda x: order_type_flag(x)) + annotation_df["color"] = annotation_df["order_type"].apply(lambda x: order_type_color(x)) + print(annotation_df.tail()) + + if type(levels) is list and len(levels) >= 2: + levels.sort() + drawers = [] + for level in levels: + drawers.append( + zvt_context.factor_cls_registry[factor]( + entity_schema=zvt_context.tradable_schema_map[entity_type], level=level, entity_ids=[entity] + ).drawer() + ) + stacked = StackedDrawer(*drawers) + + return dcc.Graph(id=f"{factor}-{entity_type}-{entity}", figure=stacked.draw_kline(show=False, height=900)) + else: + if type(levels) is list: + level = levels[0] + else: + level = levels + drawer = zvt_context.factor_cls_registry[factor]( + entity_schema=zvt_context.tradable_schema_map[entity_type], + level=level, + entity_ids=[entity], + need_persist=False, + ).drawer() + if pd_is_not_null(sub_df): + drawer.add_sub_df(sub_df) + if pd_is_not_null(annotation_df): + drawer.annotation_df = annotation_df + + return dcc.Graph(id=f"{factor}-{entity_type}-{entity}", figure=drawer.draw_kline(show=False, height=800)) + raise dash.PreventUpdate() diff --git a/src/zvt/ui/assets/__init__.py b/src/zvt/ui/assets/__init__.py new file mode 100644 index 00000000..40a96afc --- /dev/null +++ b/src/zvt/ui/assets/__init__.py @@ 
-0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/src/zvt/ui/assets/base.css b/src/zvt/ui/assets/base.css new file mode 100644 index 00000000..a4fd4963 --- /dev/null +++ b/src/zvt/ui/assets/base.css @@ -0,0 +1,612 @@ +/* Table of contents +–––––––––––––––––––––––––––––––––––––––––––––––––– +- Plotly.js +- Grid +- Base Styles +- Typography +- Links +- Buttons +- Forms +- Lists +- Code +- Tables +- Spacing +- Utilities +- Clearing +- Media Queries +*/ + +/* PLotly.js +–––––––––––––––––––––––––––––––––––––––––––––––––– */ +/* plotly.js's modebar's z-index is 1001 by default + * https://github.com/plotly/plotly.js/blob/7e4d8ab164258f6bd48be56589dacd9bdd7fded2/src/css/_modebar.scss#L5 + * In case a dropdown is above the graph, the dropdown's options + * will be rendered below the modebar + * Increase the select option's z-index + */ + +/* This was actually not quite right - + dropdowns were overlapping each other (edited October 26) + +.Select { + z-index: 1002; +}*/ + +/* Grid +–––––––––––––––––––––––––––––––––––––––––––––––––– */ +.container { + position: relative; + width: 100%; + max-width: 960px; + margin: 0 auto; + padding: 0 20px; + box-sizing: border-box; +} + +.column, +.columns { + width: 100%; + float: left; + box-sizing: border-box; +} + +/* For devices larger than 400px */ +@media (min-width: 400px) { + .container { + width: 85%; + padding: 0; + } +} + +/* For devices larger than 550px */ +@media (min-width: 550px) { + .container { + width: 80%; + } + + .column, + .columns { + margin-left: 4%; + } + + .column:first-child, + .columns:first-child { + margin-left: 0; + } + + .one.column, + .one.columns { + width: 4.66666666667%; + } + + .two.columns { + width: 13.3333333333%; + } + + .three.columns { + width: 22%; + } + + .four.columns { + width: 30.6666666667%; + } + + .five.columns { + width: 39.3333333333%; + } + + .six.columns { + width: 48%; + } + + .seven.columns { + width: 56.6666666667%; + } + + .eight.columns { + width: 65.3333333333%; + } + + 
.nine.columns { + width: 74.0%; + } + + .ten.columns { + width: 82.6666666667%; + } + + .eleven.columns { + width: 91.3333333333%; + } + + .twelve.columns { + width: 100%; + margin-left: 0; + } + + .one-third.column { + width: 30.6666666667%; + } + + .two-thirds.column { + width: 65.3333333333%; + } + + .one-half.column { + width: 48%; + } + + /* Offsets */ + .offset-by-one.column, + .offset-by-one.columns { + margin-left: 8.66666666667%; + } + + .offset-by-two.column, + .offset-by-two.columns { + margin-left: 17.3333333333%; + } + + .offset-by-three.column, + .offset-by-three.columns { + margin-left: 26%; + } + + .offset-by-four.column, + .offset-by-four.columns { + margin-left: 34.6666666667%; + } + + .offset-by-five.column, + .offset-by-five.columns { + margin-left: 43.3333333333%; + } + + .offset-by-six.column, + .offset-by-six.columns { + margin-left: 52%; + } + + .offset-by-seven.column, + .offset-by-seven.columns { + margin-left: 60.6666666667%; + } + + .offset-by-eight.column, + .offset-by-eight.columns { + margin-left: 69.3333333333%; + } + + .offset-by-nine.column, + .offset-by-nine.columns { + margin-left: 78.0%; + } + + .offset-by-ten.column, + .offset-by-ten.columns { + margin-left: 86.6666666667%; + } + + .offset-by-eleven.column, + .offset-by-eleven.columns { + margin-left: 95.3333333333%; + } + + .offset-by-one-third.column, + .offset-by-one-third.columns { + margin-left: 34.6666666667%; + } + + .offset-by-two-thirds.column, + .offset-by-two-thirds.columns { + margin-left: 69.3333333333%; + } + + .offset-by-one-half.column, + .offset-by-one-half.columns { + margin-left: 52%; + } + +} + + +/* Base Styles +–––––––––––––––––––––––––––––––––––––––––––––––––– */ +/* NOTE +html is set to 62.5% so that all the REM measurements throughout Skeleton +are based on 10px sizing. 
So basically 1.5rem = 15px :) */ +html { + font-size: 62.5%; +} + +body { + font-size: 1.5em; /* currently ems cause chrome bug misinterpreting rems on body element */ + line-height: 1.6; + font-weight: 400; + font-family: "Open Sans", "HelveticaNeue", "Helvetica Neue", Helvetica, Arial, sans-serif; + color: rgb(50, 50, 50); +} + + +/* Typography +–––––––––––––––––––––––––––––––––––––––––––––––––– */ +h1, h2, h3, h4, h5, h6 { + margin-top: 0; + margin-bottom: 0; + font-weight: 300; +} + +h1 { + font-size: 4.5rem; + line-height: 1.2; + letter-spacing: -.1rem; + margin-bottom: 2rem; +} + +h2 { + font-size: 3.6rem; + line-height: 1.25; + letter-spacing: -.1rem; + margin-bottom: 1.8rem; + margin-top: 1.8rem; +} + +h3 { + font-size: 3.0rem; + line-height: 1.3; + letter-spacing: -.1rem; + margin-bottom: 1.5rem; + margin-top: 1.5rem; +} + +h4 { + font-size: 2.6rem; + line-height: 1.35; + letter-spacing: -.08rem; + margin-bottom: 1.2rem; + margin-top: 1.2rem; +} + +h5 { + font-size: 2.2rem; + line-height: 1.5; + letter-spacing: -.05rem; + margin-bottom: 0.6rem; + margin-top: 0.6rem; +} + +h6 { + font-size: 2.0rem; + line-height: 1.6; + letter-spacing: 0; + margin-bottom: 0.75rem; + margin-top: 0.75rem; +} + +p { + margin-top: 0; +} + + +/* Blockquotes +–––––––––––––––––––––––––––––––––––––––––––––––––– */ +blockquote { + border-left: 4px lightgrey solid; + padding-left: 1rem; + margin-top: 2rem; + margin-bottom: 2rem; + margin-left: 0rem; +} + + +/* Links +–––––––––––––––––––––––––––––––––––––––––––––––––– */ +a { + color: #1EAEDB; + text-decoration: underline; + cursor: pointer; +} + +a:hover { + color: #0FA0CE; +} + + +/* Buttons +–––––––––––––––––––––––––––––––––––––––––––––––––– */ +.button, +button, +input[type="submit"], +input[type="reset"], +input[type="button"] { + display: inline-block; + height: 38px; + padding: 0 30px; + color: #555; + text-align: center; + font-size: 11px; + font-weight: 600; + line-height: 38px; + letter-spacing: .1rem; + text-transform: 
uppercase; + text-decoration: none; + white-space: nowrap; + background-color: transparent; + border-radius: 4px; + border: 1px solid #bbb; + cursor: pointer; + box-sizing: border-box; +} + +.button:hover, +button:hover, +input[type="submit"]:hover, +input[type="reset"]:hover, +input[type="button"]:hover, +.button:focus, +button:focus, +input[type="submit"]:focus, +input[type="reset"]:focus, +input[type="button"]:focus { + color: #333; + border-color: #888; + outline: 0; +} + +.button.button-primary, +button.button-primary, +input[type="submit"].button-primary, +input[type="reset"].button-primary, +input[type="button"].button-primary { + color: #FFF; + background-color: #33C3F0; + border-color: #33C3F0; +} + +.button.button-primary:hover, +button.button-primary:hover, +input[type="submit"].button-primary:hover, +input[type="reset"].button-primary:hover, +input[type="button"].button-primary:hover, +.button.button-primary:focus, +button.button-primary:focus, +input[type="submit"].button-primary:focus, +input[type="reset"].button-primary:focus, +input[type="button"].button-primary:focus { + color: #FFF; + background-color: #1EAEDB; + border-color: #1EAEDB; +} + + +/* Forms +–––––––––––––––––––––––––––––––––––––––––––––––––– */ +input[type="email"], +input[type="number"], +input[type="search"], +input[type="text"], +input[type="tel"], +input[type="url"], +input[type="password"], +textarea, +select { + height: 38px; + padding: 6px 10px; /* The 6px vertically centers text on FF, ignored by Webkit */ + background-color: #fff; + border: 1px solid #D1D1D1; + border-radius: 4px; + box-shadow: none; + box-sizing: border-box; + font-family: inherit; + font-size: inherit; /*https://stackoverflow.com/questions/6080413/why-doesnt-input-inherit-the-font-from-body*/ +} + +/* Removes awkward default styles on some inputs for iOS */ +input[type="email"], +input[type="number"], +input[type="search"], +input[type="text"], +input[type="tel"], +input[type="url"], +input[type="password"], 
+textarea { + -webkit-appearance: none; + -moz-appearance: none; + appearance: none; +} + +textarea { + min-height: 65px; + padding-top: 6px; + padding-bottom: 6px; +} + +input[type="email"]:focus, +input[type="number"]:focus, +input[type="search"]:focus, +input[type="text"]:focus, +input[type="tel"]:focus, +input[type="url"]:focus, +input[type="password"]:focus, +textarea:focus, +select:focus { + border: 1px solid #33C3F0; + outline: 0; +} + +label, +legend { + display: block; + margin-bottom: 0px; +} + +fieldset { + padding: 0; + border-width: 0; +} + +input[type="checkbox"], +input[type="radio"] { + display: inline; +} + +label > .label-body { + display: inline-block; + margin-left: .5rem; + font-weight: normal; +} + + +/* Lists +–––––––––––––––––––––––––––––––––––––––––––––––––– */ +ul { + list-style: circle inside; +} + +ol { + list-style: decimal inside; +} + +ol, ul { + padding-left: 0; + margin-top: 0; +} + +ul ul, +ul ol, +ol ol, +ol ul { + margin: 1.5rem 0 1.5rem 3rem; + font-size: 90%; +} + +li { + margin-bottom: 1rem; +} + + +/* Tables +–––––––––––––––––––––––––––––––––––––––––––––––––– */ +table { + border-collapse: collapse; +} + +th, +td { + padding: 12px 15px; + text-align: left; + border-bottom: 1px solid #E1E1E1; +} + +th:first-child, +td:first-child { + padding-left: 0; +} + +th:last-child, +td:last-child { + padding-right: 0; +} + + +/* Spacing +–––––––––––––––––––––––––––––––––––––––––––––––––– */ +button, +.button { + margin-bottom: 0rem; +} + +input, +textarea, +select, +fieldset { + margin-bottom: 0rem; +} + +pre, +dl, +figure, +table, +form { + margin-bottom: 0rem; +} + +p, +ul, +ol { + margin-bottom: 0.75rem; +} + +/* Utilities +–––––––––––––––––––––––––––––––––––––––––––––––––– */ +.u-full-width { + width: 100%; + box-sizing: border-box; +} + +.u-max-full-width { + max-width: 100%; + box-sizing: border-box; +} + +.u-pull-right { + float: right; +} + +.u-pull-left { + float: left; +} + + +/* Misc 
+–––––––––––––––––––––––––––––––––––––––––––––––––– */ +hr { + margin-top: 3rem; + margin-bottom: 3.5rem; + border-width: 0; + border-top: 1px solid #E1E1E1; +} + + +/* Clearing +–––––––––––––––––––––––––––––––––––––––––––––––––– */ + +/* Self Clearing Goodness */ +.container:after, +.row:after, +.u-cf { + content: ""; + display: table; + clear: both; +} + + +/* Media Queries +–––––––––––––––––––––––––––––––––––––––––––––––––– */ +/* +Note: The best way to structure the use of media queries is to create the queries +near the relevant code. For example, if you wanted to change the styles for buttons +on small devices, paste the mobile query code up in the buttons section and style it +there. +*/ + + +/* Larger than mobile */ +@media (min-width: 400px) { +} + +/* Larger than phablet (also point when grid becomes active) */ +@media (min-width: 550px) { +} + +/* Larger than tablet */ +@media (min-width: 750px) { +} + +/* Larger than desktop */ +@media (min-width: 1000px) { +} + +/* Larger than Desktop HD */ +@media (min-width: 1200px) { +} \ No newline at end of file diff --git a/zvt/ui/assets/custom.css b/src/zvt/ui/assets/custom.css similarity index 100% rename from zvt/ui/assets/custom.css rename to src/zvt/ui/assets/custom.css diff --git a/src/zvt/ui/components/__init__.py b/src/zvt/ui/components/__init__.py new file mode 100644 index 00000000..40a96afc --- /dev/null +++ b/src/zvt/ui/components/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/src/zvt/ui/components/dcc_components.py b/src/zvt/ui/components/dcc_components.py new file mode 100644 index 00000000..a877c533 --- /dev/null +++ b/src/zvt/ui/components/dcc_components.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- + +from dash import dcc + +from zvt.api.kdata import get_kdata_schema +from zvt.contract import zvt_context +from zvt.contract.api import decode_entity_id +from zvt.contract.drawer import Drawer +from zvt.contract.reader import DataReader +from zvt.trader.trader_info_api import 
OrderReader, AccountStatsReader +from zvt.utils.pd_utils import pd_is_not_null + + +def order_type_color(order_type): + if order_type == "order_long" or order_type == "order_close_short": + return "#ec0000" + else: + return "#00da3c" + + +def order_type_flag(order_type): + if order_type == "order_long" or order_type == "order_close_short": + return "B" + else: + return "S" + + +def get_trading_signals_figure( + order_reader: OrderReader, entity_id: str, start_timestamp=None, end_timestamp=None, adjust_type=None +): + entity_type, _, _ = decode_entity_id(entity_id) + + data_schema = get_kdata_schema(entity_type=entity_type, level=order_reader.level, adjust_type=adjust_type) + if not start_timestamp: + start_timestamp = order_reader.start_timestamp + if not end_timestamp: + end_timestamp = order_reader.end_timestamp + kdata_reader = DataReader( + data_schema=data_schema, + entity_schema=zvt_context.tradable_schema_map.get(entity_type), + entity_ids=[entity_id], + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + level=order_reader.level, + ) + + # generate the annotation df + order_reader.move_on(timeout=0) + df = order_reader.data_df.copy() + df = df[df.entity_id == entity_id].copy() + if pd_is_not_null(df): + df["value"] = df["order_price"] + df["flag"] = df["order_type"].apply(lambda x: order_type_flag(x)) + df["color"] = df["order_type"].apply(lambda x: order_type_color(x)) + print(df.tail()) + + drawer = Drawer(main_df=kdata_reader.data_df, annotation_df=df) + return drawer.draw_kline(show=False, height=800) + + +def get_account_stats_figure(account_stats_reader: AccountStatsReader): + graph_list = [] + + # 账户统计曲线 + if account_stats_reader: + fig = account_stats_reader.draw_line(show=False) + + for trader_name in account_stats_reader.trader_names: + graph_list.append(dcc.Graph(id="{}-account".format(trader_name), figure=fig)) + + return graph_list diff --git a/src/zvt/utils/__init__.py b/src/zvt/utils/__init__.py new file mode 100644 index 
00000000..938e1239 --- /dev/null +++ b/src/zvt/utils/__init__.py @@ -0,0 +1,2 @@ +# the __all__ is generated +__all__ = [] diff --git a/src/zvt/utils/decorator.py b/src/zvt/utils/decorator.py new file mode 100644 index 00000000..3d6ef646 --- /dev/null +++ b/src/zvt/utils/decorator.py @@ -0,0 +1,11 @@ +# -*- coding: utf-8 -*- +def to_string(cls): + def __str__(self): + return "%s(%s)" % (type(self).__name__, ", ".join("%s=%s" % item for item in vars(self).items())) + + cls.__str__ = __str__ + return cls + + +# the __all__ is generated +__all__ = ["to_string"] diff --git a/zvt/utils/file_utils.py b/src/zvt/utils/file_utils.py similarity index 61% rename from zvt/utils/file_utils.py rename to src/zvt/utils/file_utils.py index 15fd4333..a8299659 100644 --- a/zvt/utils/file_utils.py +++ b/src/zvt/utils/file_utils.py @@ -1,12 +1,15 @@ # -*- coding: utf-8 -*- import os -from typing import List +from typing import List, Optional -def list_all_files(dir_path: str = './domain', ext: str = '.py', excludes=None, return_base_name=False) -> List[str]: +def list_all_files( + dir_path: str = "./domain", ext: Optional[str] = ".py", excludes=None, includes=None, return_base_name=False +) -> List[str]: """ list all files with extension in specific directory recursively + :param includes: including files, None means all :param dir_path: the directory path :param ext: file extension :param excludes: excluding files @@ -16,11 +19,13 @@ def list_all_files(dir_path: str = './domain', ext: str = '.py', excludes=None, files = [] for entry in os.scandir(dir_path): if entry.is_dir(): - files += list_all_files(entry.path) + files += list_all_files(entry.path, ext=ext, excludes=excludes, return_base_name=return_base_name) elif entry.is_file(): if not ext or (ext and entry.path.endswith(ext)): if excludes and entry.path.endswith(excludes): continue + if includes and not entry.path.endswith(includes): + continue if return_base_name: files.append(os.path.basename(entry.path)) else: @@ -28,3 +33,7 
@@ def list_all_files(dir_path: str = './domain', ext: str = '.py', excludes=None, else: pass return files + + +# the __all__ is generated +__all__ = ["list_all_files"] diff --git a/src/zvt/utils/git_utils.py b/src/zvt/utils/git_utils.py new file mode 100644 index 00000000..d68c0802 --- /dev/null +++ b/src/zvt/utils/git_utils.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +import subprocess + + +def get_git_user_name(): + try: + return subprocess.check_output(["git", "config", "--get", "user.name"]).decode("utf8").strip() + except Exception: + return "foolcage" + + +def get_git_user_email(): + try: + return subprocess.check_output(["git", "config", "--get", "user.email"]).decode("utf8").strip() + except Exception: + return "" + + +# the __all__ is generated +__all__ = ["get_git_user_name", "get_git_user_email"] diff --git a/src/zvt/utils/model_utils.py b/src/zvt/utils/model_utils.py new file mode 100644 index 00000000..9690fa43 --- /dev/null +++ b/src/zvt/utils/model_utils.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +def update_model(db_model, schema): + for key, value in schema.dict().items(): + if value is not None: + setattr(db_model, key, value) + + +# the __all__ is generated +__all__ = ["update_model"] diff --git a/src/zvt/utils/pd_utils.py b/src/zvt/utils/pd_utils.py new file mode 100644 index 00000000..e0a62ebe --- /dev/null +++ b/src/zvt/utils/pd_utils.py @@ -0,0 +1,137 @@ +# -*- coding: utf-8 -*- +from typing import List, Union + +import pandas as pd + + +def drop_continue_duplicate(s: Union[pd.Series, pd.DataFrame], col=None): + if type(s) == pd.Series: + return s[s.shift() != s] + if type(s) == pd.DataFrame: + ss = s[col] + selected = ss[ss.shift() != ss] + return s.loc[selected.index, :] + + +def is_filter_result_df(df: pd.DataFrame): + return pd_is_not_null(df) and "filter_result" in df.columns + + +def is_score_result_df(df: pd.DataFrame): + return pd_is_not_null(df) and "score_result" in df.columns + + +def pd_is_not_null(df: Union[pd.DataFrame, pd.Series]): + return df
is not None and not df.empty + + +def group_by_entity_id(input_df: pd.DataFrame): + return input_df.groupby(level=0) + + +def normalize_group_compute_result(group_result): + if group_result.index.nlevels == 3: + return group_result.reset_index(level=0, drop=True) + return group_result + + +def merge_filter_result(input_df: pd.DataFrame, filter_result: pd.Series): + if is_filter_result_df(input_df): + input_df["filter_result"] = input_df["filter_result"] & filter_result + else: + input_df["filter_result"] = filter_result + + return input_df + + +def index_df(df, index="timestamp", inplace=True, drop=False, time_field="timestamp"): + if time_field: + df[time_field] = pd.to_datetime(df[time_field]) + + if inplace: + df.set_index(index, drop=drop, inplace=inplace) + else: + df = df.set_index(index, drop=drop, inplace=inplace) + + if type(index) == str: + df = df.sort_index() + elif type(index) == list: + df.index.names = index + level = list(range(len(index))) + df = df.sort_index(level=level) + return df + + +def normal_index_df(df, category_field="entity_id", time_filed="timestamp", drop=True, default_entity="entity"): + if type(df) == pd.Series: + df = df.to_frame(name="value") + + index = [category_field, time_filed] + if is_normal_df(df): + return df + + if df.index.nlevels == 1: + if (time_filed != df.index.name) and (time_filed not in df.columns): + assert False + if category_field not in df.columns: + df[category_field] = default_entity + if time_filed not in df.columns: + df = df.reset_index() + + return index_df(df=df, index=index, drop=drop, time_field="timestamp") + + +def is_normal_df(df, category_field="entity_id", time_filed="timestamp"): + if pd_is_not_null(df) and df.index.nlevels == 2: + names = df.index.names + + if len(names) == 2 and names[0] == category_field and names[1] == time_filed: + return True + + return False + + +def df_subset(df, columns=None): + if columns: + return df.loc[:, columns] + return df + + +def fill_with_same_index(df_list: 
List[pd.DataFrame]): + idx = None + for df in df_list: + if idx is None: + idx = df.index + else: + idx = idx.append(df.index).drop_duplicates() + idx = idx.sort_values() + + result = [] + for df in df_list: + # print(df[df.index.duplicated()]) + added_index = idx.difference(df.index.drop_duplicates()) + added_df = pd.DataFrame(index=added_index, columns=df.columns) + + # df1 = df.reindex(idx) + # df1 = df.append(added_df) + df1 = pd.concat([df, added_df]) + df1 = df1.sort_index() + result.append(df1) + return result + + +# the __all__ is generated +__all__ = [ + "drop_continue_duplicate", + "is_filter_result_df", + "is_score_result_df", + "pd_is_not_null", + "group_by_entity_id", + "normalize_group_compute_result", + "merge_filter_result", + "index_df", + "normal_index_df", + "is_normal_df", + "df_subset", + "fill_with_same_index", +] diff --git a/src/zvt/utils/recorder_utils.py b/src/zvt/utils/recorder_utils.py new file mode 100644 index 00000000..a083580b --- /dev/null +++ b/src/zvt/utils/recorder_utils.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +import logging +import time +from typing import Type + +import zvt as zvt +from zvt import zvt_config +from zvt.informer import EmailInformer + +logger = logging.getLogger(__name__) + + +def run_data_recorder( + domain: Type["zvt.contract.Mixin"], + entity_provider=None, + data_provider=None, + entity_ids=None, + retry_times=10, + sleeping_time=10, + return_unfinished=False, + **recorder_kv, +): + logger.info(f" record data: {domain.__name__}, entity_provider: {entity_provider}, data_provider: {data_provider}") + + unfinished_entity_ids = entity_ids + email_action = EmailInformer() + + while retry_times > 0: + try: + if return_unfinished: + unfinished_entity_ids = domain.record_data( + entity_ids=unfinished_entity_ids, + provider=data_provider, + sleeping_time=sleeping_time, + return_unfinished=return_unfinished, + **recorder_kv, + ) + if unfinished_entity_ids: +
logger.info(f"unfinished_entity_ids({len(unfinished_entity_ids)}): {unfinished_entity_ids}") + raise Exception("Would retry with unfinished latter!") + else: + domain.record_data( + entity_ids=entity_ids, + provider=data_provider, + sleeping_time=sleeping_time, + return_unfinished=return_unfinished, + **recorder_kv, + ) + + msg = f"record {domain.__name__} success" + logger.info(msg) + email_action.send_message(zvt_config["email_username"], msg, msg) + break + except Exception as e: + logger.exception("report error:{}".format(e)) + time.sleep(60 * 2) + retry_times = retry_times - 1 + if retry_times == 0: + email_action.send_message( + zvt_config["email_username"], + f"record {domain.__name__} error", + f"record {domain.__name__} error: {e}", + ) + + +if __name__ == "__main__": + run_data_recorder() +# the __all__ is generated +__all__ = ["run_data_recorder"] diff --git a/src/zvt/utils/str_utils.py b/src/zvt/utils/str_utils.py new file mode 100644 index 00000000..bde051b0 --- /dev/null +++ b/src/zvt/utils/str_utils.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- + + +def to_snake_str(input: str) -> str: + parts = [] + part = "" + for c in input: + if c.isupper() or c.isdigit(): + if part: + parts.append(part) + part = c.lower() + else: + part = part + c + + parts.append(part) + + if len(parts) > 1: + return "_".join(parts) + elif parts: + return parts[0] + + +def to_camel_str(input: str) -> str: + parts = input.split("_") + domain_name = "" + for part in parts: + domain_name = domain_name + part.capitalize() + return domain_name + + +# the __all__ is generated +__all__ = ["to_snake_str", "to_camel_str"] diff --git a/src/zvt/utils/time_utils.py b/src/zvt/utils/time_utils.py new file mode 100644 index 00000000..f5a57ecb --- /dev/null +++ b/src/zvt/utils/time_utils.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- +import calendar +import datetime + +import arrow +import pandas as pd + +from zvt.common.query_models import TimeUnit + +CHINA_TZ = "Asia/Shanghai" + 
+TIME_FORMAT_ISO8601 = "YYYY-MM-DDTHH:mm:ss.SSS" + +TIME_FORMAT_MON = "YYYY-MM" + +TIME_FORMAT_DAY = "YYYY-MM-DD" + +TIME_FORMAT_DAY1 = "YYYYMMDD" + +TIME_FORMAT_MINUTE = "YYYYMMDDHHmm" + +TIME_FORMAT_SECOND = "YYYYMMDDHHmmss" + +TIME_FORMAT_MINUTE1 = "HH:mm" + +TIME_FORMAT_MINUTE2 = "YYYY-MM-DD HH:mm:ss" + + +# ms(int) or second(float) or str +def to_pd_timestamp(the_time) -> pd.Timestamp: + if the_time is None: + return None + if type(the_time) == int: + return pd.Timestamp.fromtimestamp(the_time / 1000) + + if type(the_time) == float: + return pd.Timestamp.fromtimestamp(the_time) + + return pd.Timestamp(the_time) + + +def get_local_timezone(): + now = datetime.datetime.now() + local_now = now.astimezone() + local_tz = local_now.tzinfo + return local_tz + + +def to_timestamp(the_time): + return int(to_pd_timestamp(the_time).tz_localize(get_local_timezone()).timestamp() * 1000) + + +def now_timestamp(): + return int(pd.Timestamp.utcnow().timestamp() * 1000) + + +def now_pd_timestamp() -> pd.Timestamp: + return pd.Timestamp.now() + + +def today() -> pd.Timestamp: + return pd.Timestamp.today() + + +def current_date() -> pd.Timestamp: + return to_pd_timestamp(today().date()) + + +def tomorrow_date(): + return to_pd_timestamp(date_time_by_interval(today(), 1).date()) + + +def to_time_str(the_time, fmt=TIME_FORMAT_DAY): + try: + return arrow.get(to_pd_timestamp(the_time)).format(fmt) + except Exception as e: + return the_time + + +def now_time_str(fmt=TIME_FORMAT_DAY): + return to_time_str(the_time=now_pd_timestamp(), fmt=fmt) + + +def recent_year_date(): + return date_time_by_interval(current_date(), -365) + + +def date_time_by_interval(the_time, interval=1, unit: TimeUnit = TimeUnit.day): + time_delta = None + if unit == TimeUnit.year: + time_delta = datetime.timedelta(days=interval * 365) + elif unit == TimeUnit.month: + time_delta = datetime.timedelta(days=interval * 30) + elif unit == TimeUnit.day: + time_delta = datetime.timedelta(days=interval) + elif unit == 
TimeUnit.minute: + time_delta = datetime.timedelta(minutes=interval) + elif unit == TimeUnit.second: + time_delta = datetime.timedelta(seconds=interval) + + return to_pd_timestamp(the_time) + time_delta + + +def pre_month(t=now_pd_timestamp()): + t = to_pd_timestamp(t) + t = t.replace(day=1) + if t.month > 1: + year = t.year + month = t.month - 1 + else: + year = t.year - 1 + month = 12 + last_valid_date = t.replace(year=year, month=month) + return last_valid_date + + +def pre_month_start_date(t=current_date()): + return month_start_date(pre_month(t)) + + +def pre_month_end_date(t=current_date()): + return month_end_date(pre_month(t)) + + +def month_start_date(the_date): + the_date = to_pd_timestamp(the_date) + return the_date.replace(day=1) + + +def month_end_date(the_date): + the_date = to_pd_timestamp(the_date) + + _, day = calendar.monthrange(the_date.year, the_date.month) + return the_date.replace(day=day) + + +def month_start_end_ranges(start_date, end_date): + days = pd.date_range(start=start_date, end=end_date, freq="M") + return [(month_start_date(d), month_end_date(d)) for d in days] + + +def is_same_date(one, two): + return to_pd_timestamp(one).date() == to_pd_timestamp(two).date() + + +def is_same_time(one, two): + return to_timestamp(one) == to_timestamp(two) + + +def get_year_quarter(time): + time = to_pd_timestamp(time) + return time.year, ((time.month - 1) // 3) + 1 + + +def day_offset_today(offset=0): + return now_pd_timestamp() + datetime.timedelta(days=offset) + + +def get_year_quarters(start, end=pd.Timestamp.now()): + start_year_quarter = get_year_quarter(start) + current_year_quarter = get_year_quarter(end) + if current_year_quarter[0] == start_year_quarter[0]: + return [(current_year_quarter[0], x) for x in range(start_year_quarter[1], current_year_quarter[1] + 1)] + elif current_year_quarter[0] - start_year_quarter[0] == 1: + return [(start_year_quarter[0], x) for x in range(start_year_quarter[1], 5)] + [ + (current_year_quarter[0], x) for x 
in range(1, current_year_quarter[1] + 1) + ] + elif current_year_quarter[0] - start_year_quarter[0] > 1: + return ( + [(start_year_quarter[0], x) for x in range(start_year_quarter[1], 5)] + + [(x, y) for x in range(start_year_quarter[0] + 1, current_year_quarter[0]) for y in range(1, 5)] + + [(current_year_quarter[0], x) for x in range(1, current_year_quarter[1] + 1)] + ) + else: + raise Exception("wrong start time:{}".format(start)) + + +def date_and_time(the_date, the_time): + time_str = "{}T{}:00.000".format(to_time_str(the_date), the_time) + + return to_pd_timestamp(time_str) + + +def split_time_interval(start, end, method=None, interval=30, freq="D"): + start = to_pd_timestamp(start) + end = to_pd_timestamp(end) + if not method: + while start < end: + interval_end = min(date_time_by_interval(start, interval), end) + yield pd.date_range(start=start, end=interval_end, freq=freq) + start = date_time_by_interval(interval_end, 1) + + if method == "month": + while start <= end: + _, day = calendar.monthrange(start.year, start.month) + + interval_end = min(to_pd_timestamp(f"{start.year}-{start.month}-{day}"), end) + yield pd.date_range(start=start, end=interval_end, freq=freq) + start = date_time_by_interval(interval_end, 1) + + +def count_interval(start_date, end_date): + start_date = to_pd_timestamp(start_date) + end_date = to_pd_timestamp(end_date) + delta = end_date - start_date + return delta.days + + +if __name__ == "__main__": + print(tomorrow_date() > date_time_by_interval(today(), 2)) +# the __all__ is generated +__all__ = [ + "CHINA_TZ", + "TIME_FORMAT_ISO8601", + "TIME_FORMAT_MON", + "TIME_FORMAT_DAY", + "TIME_FORMAT_DAY1", + "TIME_FORMAT_MINUTE", + "TIME_FORMAT_SECOND", + "TIME_FORMAT_MINUTE1", + "TIME_FORMAT_MINUTE2", + "to_pd_timestamp", + "get_local_timezone", + "to_timestamp", + "now_timestamp", + "now_pd_timestamp", + "today", + "current_date", + "tomorrow_date", + "to_time_str", + "now_time_str", + "recent_year_date", + "date_time_by_interval", + 
"pre_month", + "pre_month_start_date", + "pre_month_end_date", + "month_start_date", + "month_end_date", + "month_start_end_ranges", + "is_same_date", + "is_same_time", + "get_year_quarter", + "day_offset_today", + "get_year_quarters", + "date_and_time", + "split_time_interval", + "count_interval", +] diff --git a/src/zvt/utils/utils.py b/src/zvt/utils/utils.py new file mode 100644 index 00000000..a0d85fcf --- /dev/null +++ b/src/zvt/utils/utils.py @@ -0,0 +1,312 @@ +# -*- coding: utf-8 -*- +import logging +import numbers +from decimal import * +from urllib import parse + +import pandas as pd + +getcontext().prec = 16 + +logger = logging.getLogger(__name__) + +none_values = ["不变", "--", "-", "新进"] +zero_values = ["不变", "--", "-", "新进"] + + +def first_item_to_float(the_list): + return to_float(the_list[0]) + + +def second_item_to_float(the_list): + return to_float(the_list[1]) + + +def add_func_to_value(the_map, the_func): + for k, v in the_map.items(): + the_map[k] = (v, the_func) + return the_map + + +def to_float(the_str, default=None): + if not the_str: + return default + if the_str in none_values: + return None + + if "%" in the_str: + return pct_to_float(the_str) + try: + scale = 1.0 + if the_str[-2:] == "万亿": + the_str = the_str[0:-2] + scale = 1000000000000 + elif the_str[-1] == "亿": + the_str = the_str[0:-1] + scale = 100000000 + elif the_str[-1] == "万": + the_str = the_str[0:-1] + scale = 10000 + if not the_str: + return default + return float(Decimal(the_str.replace(",", "")) * Decimal(scale)) + except Exception as e: + logger.error("the_str:{}".format(the_str)) + logger.exception(e) + return default + + +def pct_to_float(the_str, default=None): + if the_str in none_values: + return None + + try: + return float(Decimal(the_str.replace("%", "")) / Decimal(100)) + except Exception as e: + logger.exception(e) + return default + + +def float_to_pct(input_float: float) -> str: + # Convert the float to a percentage and format it to two decimal places + return 
f"{input_float * 100:.2f}%" + + +def json_callback_param(the_str): + json_str = the_str[the_str.index("(") + 1 : the_str.rindex(")")].replace("null", "None") + return eval(json_str) + + +def fill_domain_from_dict(the_domain, the_dict: dict, the_map: dict = None, default_func=lambda x: x): + """ + use field map and related func to fill properties from the dict to the domain + + + :param the_domain: + :type the_domain: DeclarativeMeta + :param the_dict: + :type the_dict: dict + :param the_map: + :type the_map: dict + :param default_func: + :type default_func: function + """ + if not the_map: + the_map = {} + for k in the_dict: + the_map[k] = (k, default_func) + + for k, v in the_map.items(): + if isinstance(v, tuple): + field_in_dict = v[0] + the_func = v[1] + else: + field_in_dict = v + the_func = default_func + + the_value = the_dict.get(field_in_dict) + if the_value is not None: + to_value = the_value + if to_value in none_values: + setattr(the_domain, k, None) + else: + result_value = the_func(to_value) + setattr(the_domain, k, result_value) + # NOTE: removed redundant exec(); the setattr above already performs this assignment + + +SUPPORT_ENCODINGS = ["GB2312", "GBK", "GB18030", "UTF-8"] + + +def read_csv(f, encoding, sep=None, na_values=None): + encodings = [encoding] + SUPPORT_ENCODINGS + for encoding in encodings: + try: + if sep: + return pd.read_csv(f, sep=sep, encoding=encoding, na_values=na_values) + else: + return pd.read_csv(f, encoding=encoding, na_values=na_values) + except UnicodeDecodeError as e: + logger.warning("read_csv failed by using encoding:{}".format(encoding), e) + f.seek(0) + continue + return None + + +def chrome_copy_header_to_dict(src): + lines = src.split("\n") + header = {} + if lines: + for line in lines: + try: + index = line.index(":") + key = line[:index] + value = line[index + 1 :] + if key and value: + header.setdefault(key.strip(), value.strip()) + except Exception: + pass + return header + + +def to_positive_number(number): + if isinstance(number, numbers.Number): +
return abs(number) + + return 0 + + +def multiple_number(number, factor): + try: + return number * factor + except: + return number + + +def add_to_map_list(the_map, key, value): + result = [] + if key in the_map: + result = the_map[key] + else: + the_map[key] = result + + if value not in result: + result.append(value) + + +def iterate_with_step(data, sub_size=100): + size = len(data) + if size >= sub_size: + step_count = int(size / sub_size) + if size % sub_size: + step_count = step_count + 1 + else: + step_count = 1 + + for step in range(step_count): + if type(data) == pd.DataFrame or type(data) == pd.Series: + yield data.iloc[sub_size * step : sub_size * (step + 1)] + else: + yield data[sub_size * step : sub_size * (step + 1)] + + +def url_unquote(url): + return parse.unquote(url) + + +def parse_url_params(url): + url = url_unquote(url) + return parse.parse_qs(parse.urlsplit(url).query) + + +def set_one_and_only_one(**kwargs): + all_none = all(kwargs[v] is None for v in kwargs) + if all_none: + raise ValueError(f"{kwargs} must be set one at least") + + set_size = len([v for v in kwargs if kwargs[v] is not None]) + if set_size != 1: + raise ValueError(f"{kwargs} could only set one") + + return True + + +def flatten_list(input_list): + if not input_list: + return input_list + result = [] + for item in input_list: + if isinstance(item, list): + result.extend(item) + elif isinstance(item, dict): + result.append(item) + else: + result.append(item) + return result + + +def to_str(str_or_list): + if not str_or_list: + return None + if isinstance(str_or_list, str): + return str_or_list + if isinstance(str_or_list, list): + str_list = [str(item) for item in str_or_list] + return ";".join(str_list) + + +def compare_dicts(dict1, dict2): + # Check if both dictionaries are None + if dict1 is None and dict2 is None: + return True + + # Check if only one dictionary is None + if dict1 is None or dict2 is None: + return False + + # Check if the keys are the same + if 
set(dict1.keys()) != set(dict2.keys()): + return False + + # Check if the values are the same for each key + for key in dict1: + if dict1[key] != dict2[key]: + return False + + # If all keys and values match, return True + return True + + +def fill_dict(src, dst): + """ + Fills items from the source dictionary (src) into the destination dictionary (dst) + if the keys are not already present in dst. + + Args: + src (dict): The source dictionary to copy items from. + dst (dict): The destination dictionary to copy items into. + + Returns: + dict: The updated destination dictionary with new items from the source dictionary. + """ + if not src: + return dst + for key, value in src.items(): + if key not in dst: + dst[key] = value + return dst + + +if __name__ == "__main__": + url = url_unquote( + "https://datacenter.eastmoney.com/securities/api/data/get?type=RPT_DAILYBILLBOARD_DETAILS&sty=ALL&source=DataCenter&client=WAP&p=1&ps=20&sr=-1,1&st=TRADE_DATE,SECURITY_CODE&filter=(TRADE_DATE%3E=%272022-04-01%27)(TRADE_DATE%3C=%272022-04-29%27)(MARKET=%22SH%22)&?v=05160638952989893" + ) + print(url) + +# the __all__ is generated +__all__ = [ + "none_values", + "zero_values", + "first_item_to_float", + "second_item_to_float", + "add_func_to_value", + "to_float", + "pct_to_float", + "float_to_pct", + "json_callback_param", + "fill_domain_from_dict", + "SUPPORT_ENCODINGS", + "read_csv", + "chrome_copy_header_to_dict", + "to_positive_number", + "multiple_number", + "add_to_map_list", + "iterate_with_step", + "url_unquote", + "parse_url_params", + "set_one_and_only_one", + "flatten_list", + "to_str", + "compare_dicts", + "fill_dict", +] diff --git a/zvt/utils/zip_utils.py b/src/zvt/utils/zip_utils.py similarity index 64% rename from zvt/utils/zip_utils.py rename to src/zvt/utils/zip_utils.py index 62e3f2e1..70425ae7 100644 --- a/zvt/utils/zip_utils.py +++ b/src/zvt/utils/zip_utils.py @@ -2,12 +2,11 @@ import datetime import os +import pathlib import zipfile -def zip_dir(src_dir, - 
dst_dir=None, - zip_file_name=None): +def zip_dir(src_dir, dst_dir=None, zip_file_name=None): if not zip_file_name: zip_file_name = "data-{}.zip".format(datetime.datetime.today()) @@ -18,7 +17,7 @@ def zip_dir(src_dir, # os.remove(dst_path) - the_zip_file = zipfile.ZipFile(dst_path, 'w') + the_zip_file = zipfile.ZipFile(dst_path, "w") for folder, subfolders, files in os.walk(src_dir): for file in files: @@ -26,9 +25,7 @@ def zip_dir(src_dir, # if 'zvt_business.db' in the_path: # continue print("zip {}".format(the_path)) - the_zip_file.write(the_path, - os.path.relpath(the_path, src_dir), - compress_type=zipfile.ZIP_DEFLATED) + the_zip_file.write(the_path, os.path.relpath(the_path, src_dir), compress_type=zipfile.ZIP_DEFLATED) the_zip_file.close() @@ -36,8 +33,13 @@ def zip_dir(src_dir, def unzip(zip_file, dst_dir): the_zip_file = zipfile.ZipFile(zip_file) print("start unzip {} to {}".format(zip_file, dst_dir)) - the_zip_file.extractall(dst_dir) + + for name in the_zip_file.namelist(): + extracted_path = pathlib.Path(the_zip_file.extract(name, path=dst_dir)) + extracted_path.rename(f"{extracted_path}".encode("cp437").decode("gbk")) print("finish unzip {} to {}".format(zip_file, dst_dir)) the_zip_file.close() + + # the __all__ is generated -__all__ = ['zip_dir', 'unzip'] \ No newline at end of file +__all__ = ["zip_dir", "unzip"] diff --git a/src/zvt/zvt_server.py b/src/zvt/zvt_server.py new file mode 100644 index 00000000..3b288bfa --- /dev/null +++ b/src/zvt/zvt_server.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +import os + +import uvicorn +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import ORJSONResponse +from fastapi_pagination import add_pagination + +from zvt import zvt_env +from zvt.rest.data import data_router +from zvt.rest.factor import factor_router +from zvt.rest.misc import misc_router +from zvt.rest.trading import trading_router +from zvt.rest.work import work_router + +app = 
FastAPI(default_response_class=ORJSONResponse) + +origins = ["*"] + +app.add_middleware( + CORSMiddleware, + allow_origins=origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + + +@app.get("/") +async def root(): + return {"message": "Hello World"} + + +app.include_router(data_router) +app.include_router(factor_router) +app.include_router(work_router) +app.include_router(trading_router) +app.include_router(misc_router) + +add_pagination(app) + + +def main(): + log_config = os.path.join(zvt_env["resource_path"], "log_conf.yaml") + uvicorn.run("zvt_server:app", host="0.0.0.0", reload=True, port=8090, log_config=log_config) + + +if __name__ == "__main__": + main() diff --git a/tests/__init__.py b/tests/__init__.py index 7013599c..7a30a3d5 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,4 +1,8 @@ # -*- coding: utf-8 -*- import os +import sys -os.environ.setdefault('TESTING_ZVT', 'True') +os.environ.setdefault("TESTING_ZVT", "True") +os.environ.setdefault("SQLALCHEMY_WARN_20", "1") + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))) diff --git a/tests/api/test_common.py b/tests/api/test_common.py index 49dd3486..ac1b53b0 100644 --- a/tests/api/test_common.py +++ b/tests/api/test_common.py @@ -1,23 +1,23 @@ # -*- coding: utf-8 -*- +from zvt.api.utils import get_recent_report_date from zvt.contract import IntervalLevel -from zvt.api.quote import get_kdata -from zvt.api.quote import to_high_level_kdata, get_recent_report_date +from zvt.api.kdata import get_kdata, to_high_level_kdata from ..context import init_test_context init_test_context() def test_to_high_level_kdata(): - day_df = get_kdata(provider='joinquant', level=IntervalLevel.LEVEL_1DAY, entity_id='stock_sz_000338') + day_df = get_kdata(provider="joinquant", level=IntervalLevel.LEVEL_1DAY, entity_id="stock_sz_000338") print(day_df) - df = to_high_level_kdata(kdata_df=day_df.loc[:'2019-09-01', :], to_level=IntervalLevel.LEVEL_1WEEK) 
+ df = to_high_level_kdata(kdata_df=day_df.loc[:"2019-09-01", :], to_level=IntervalLevel.LEVEL_1WEEK) print(df) def test_get_recent_report_date(): - assert '2018-12-31' == get_recent_report_date('2019-01-01', 0) - assert '2018-09-30' == get_recent_report_date('2019-01-01', 1) - assert '2018-06-30' == get_recent_report_date('2019-01-01', 2) - assert '2018-03-31' == get_recent_report_date('2019-01-01', 3) + assert "2018-12-31" == get_recent_report_date("2019-01-01", 0) + assert "2018-09-30" == get_recent_report_date("2019-01-01", 1) + assert "2018-06-30" == get_recent_report_date("2019-01-01", 2) + assert "2018-03-31" == get_recent_report_date("2019-01-01", 3) diff --git a/tests/api/test_dividend_financing.py b/tests/api/test_dividend_financing.py index 43eb7797..fb190617 100644 --- a/tests/api/test_dividend_financing.py +++ b/tests/api/test_dividend_financing.py @@ -6,18 +6,22 @@ from zvt.contract.api import get_db_session from zvt.utils.time_utils import to_pd_timestamp -session = get_db_session(provider='eastmoney', - db_name='dividend_financing') # type: sqlalchemy.orm.Session +session = get_db_session(provider="eastmoney", db_name="dividend_financing") # type: sqlalchemy.orm.Session # 增发详情 def test_000778_spo_detial(): - result = SpoDetail.query_data(session=session, provider='eastmoney', return_type='domain', - codes=['000778'], end_timestamp='2018-09-30', - order=SpoDetail.timestamp.desc()) + result = SpoDetail.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000778"], + end_timestamp="2018-09-30", + order=SpoDetail.timestamp.desc(), + ) assert len(result) == 4 latest: SpoDetail = result[0] - assert latest.timestamp == to_pd_timestamp('2017-04-01') + assert latest.timestamp == to_pd_timestamp("2017-04-01") assert latest.spo_issues == 347600000 assert latest.spo_price == 5.15 assert latest.spo_raising_fund == 1766000000 @@ -25,12 +29,17 @@ def test_000778_spo_detial(): # 配股详情 def test_000778_rights_issue_detail(): - 
result = RightsIssueDetail.query_data(session=session, provider='eastmoney', return_type='domain', - codes=['000778'], end_timestamp='2018-09-30', - order=RightsIssueDetail.timestamp.desc()) + result = RightsIssueDetail.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000778"], + end_timestamp="2018-09-30", + order=RightsIssueDetail.timestamp.desc(), + ) assert len(result) == 2 latest: RightsIssueDetail = result[0] - assert latest.timestamp == to_pd_timestamp('2001-09-10') + assert latest.timestamp == to_pd_timestamp("2001-09-10") assert latest.rights_issues == 43570000 assert latest.rights_raising_fund == 492300000 assert latest.rights_issue_price == 11.3 @@ -38,12 +47,17 @@ def test_000778_rights_issue_detail(): # 分红融资 def test_000778_dividend_financing(): - result = DividendFinancing.query_data(session=session, provider='eastmoney', return_type='domain', - codes=['000778'], end_timestamp='2018-09-30', - order=DividendFinancing.timestamp.desc()) + result = DividendFinancing.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000778"], + end_timestamp="2018-09-30", + order=DividendFinancing.timestamp.desc(), + ) assert len(result) == 22 latest: DividendFinancing = result[1] - assert latest.timestamp == to_pd_timestamp('2017') + assert latest.timestamp == to_pd_timestamp("2017") assert latest.dividend_money == 598632026.4 assert latest.spo_issues == 347572815.0 assert latest.rights_issues == 0 diff --git a/tests/api/test_finance.py b/tests/api/test_finance.py index 91189fa2..9a7c4436 100644 --- a/tests/api/test_finance.py +++ b/tests/api/test_finance.py @@ -6,30 +6,113 @@ from zvt.contract.api import get_db_session from zvt.utils.time_utils import to_time_str -session = get_db_session(provider='eastmoney', db_name='finance') # type: sqlalchemy.orm.Session +session = get_db_session(provider="eastmoney", db_name="finance") # type: sqlalchemy.orm.Session # 银行指标 def 
test_000001_finance_factor(): - correct_timestamps = ['2018-09-30', '2018-06-30', '2018-03-31', '2017-12-31', '2017-09-30', '2017-06-30', - '2017-03-31', '2016-12-31', '2016-09-30', '2016-06-30', '2016-03-31', '2015-12-31', - '2015-09-30', '2015-06-30', '2015-03-31', '2014-12-31', '2014-09-30', '2014-06-30', - '2014-03-31', '2013-12-31', '2013-09-30', '2013-06-30', '2013-03-31', '2012-12-31', - '2012-09-30', '2012-06-30', '2012-03-31', '2011-12-31', '2011-09-30', '2011-06-30', - '2011-03-31', '2010-12-31', '2010-09-30', '2010-06-30', '2010-03-31', '2009-12-31', - '2009-09-30', '2009-06-30', '2009-03-31', '2008-12-31', '2008-09-30', '2008-06-30', - '2008-03-31', '2007-12-31', '2007-09-30', '2007-06-30', '2007-03-31', '2006-12-31', - '2006-09-30', '2006-06-30', '2006-03-31', '2005-12-31', '2005-09-30', '2005-06-30', - '2005-03-31', '2004-12-31', '2004-09-30', '2004-06-30', '2004-03-31', '2003-12-31', - '2003-09-30', '2003-06-30', '2003-03-31', '2002-12-31', '2002-09-30', '2002-06-30', - '2002-03-31', '2001-12-31', '2001-09-30', '2001-06-30', '2001-03-31', '2000-12-31', - '2000-06-30', '1999-12-31', '1999-06-30', '1998-12-31', '1998-06-30', '1997-12-31', - '1997-06-30', '1996-12-31', '1996-06-30', '1995-12-31', '1995-06-30', '1994-12-31', - '1994-06-30', '1993-12-31', '1993-06-30', '1992-12-31', '1991-12-31', '1990-12-31', - '1989-12-31'] - result = FinanceFactor.query_data(session=session, provider='eastmoney', return_type='domain', - codes=['000001'], end_timestamp='2018-12-30', - order=FinanceFactor.report_date.desc(), time_field='report_date') + correct_timestamps = [ + "2018-09-30", + "2018-06-30", + "2018-03-31", + "2017-12-31", + "2017-09-30", + "2017-06-30", + "2017-03-31", + "2016-12-31", + "2016-09-30", + "2016-06-30", + "2016-03-31", + "2015-12-31", + "2015-09-30", + "2015-06-30", + "2015-03-31", + "2014-12-31", + "2014-09-30", + "2014-06-30", + "2014-03-31", + "2013-12-31", + "2013-09-30", + "2013-06-30", + "2013-03-31", + "2012-12-31", + "2012-09-30", + 
"2012-06-30", + "2012-03-31", + "2011-12-31", + "2011-09-30", + "2011-06-30", + "2011-03-31", + "2010-12-31", + "2010-09-30", + "2010-06-30", + "2010-03-31", + "2009-12-31", + "2009-09-30", + "2009-06-30", + "2009-03-31", + "2008-12-31", + "2008-09-30", + "2008-06-30", + "2008-03-31", + "2007-12-31", + "2007-09-30", + "2007-06-30", + "2007-03-31", + "2006-12-31", + "2006-09-30", + "2006-06-30", + "2006-03-31", + "2005-12-31", + "2005-09-30", + "2005-06-30", + "2005-03-31", + "2004-12-31", + "2004-09-30", + "2004-06-30", + "2004-03-31", + "2003-12-31", + "2003-09-30", + "2003-06-30", + "2003-03-31", + "2002-12-31", + "2002-09-30", + "2002-06-30", + "2002-03-31", + "2001-12-31", + "2001-09-30", + "2001-06-30", + "2001-03-31", + "2000-12-31", + "2000-06-30", + "1999-12-31", + "1999-06-30", + "1998-12-31", + "1998-06-30", + "1997-12-31", + "1997-06-30", + "1996-12-31", + "1996-06-30", + "1995-12-31", + "1995-06-30", + "1994-12-31", + "1994-06-30", + "1993-12-31", + "1993-06-30", + "1992-12-31", + "1991-12-31", + "1990-12-31", + "1989-12-31", + ] + result = FinanceFactor.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000001"], + end_timestamp="2018-12-30", + order=FinanceFactor.report_date.desc(), + time_field="report_date", + ) assert len(correct_timestamps) == len(result) timestamps = [to_time_str(item.report_date) for item in result] assert set(correct_timestamps) == set(timestamps) @@ -70,24 +153,105 @@ def test_000001_finance_factor(): # 银行资产负债表 def test_000001_balance_sheet(): - correct_timestamps = ['2018-09-30', '2018-06-30', '2018-03-31', '2017-12-31', '2017-09-30', '2017-06-30', - '2017-03-31', '2016-12-31', '2016-09-30', '2016-06-30', '2016-03-31', '2015-12-31', - '2015-09-30', '2015-06-30', '2015-03-31', '2014-12-31', '2014-09-30', '2014-06-30', - '2014-03-31', '2013-12-31', '2013-09-30', '2013-06-30', '2013-03-31', '2012-12-31', - '2012-09-30', '2012-06-30', '2012-03-31', '2011-12-31', '2011-09-30', '2011-06-30', - 
'2011-03-31', '2010-12-31', '2010-09-30', '2010-06-30', '2010-03-31', '2009-12-31', - '2009-09-30', '2009-06-30', '2009-03-31', '2008-12-31', '2008-09-30', '2008-06-30', - '2008-03-31', '2007-12-31', '2007-09-30', '2007-06-30', '2007-03-31', '2006-12-31', - '2006-09-30', '2006-06-30', '2006-03-31', '2005-12-31', '2005-09-30', '2005-06-30', - '2005-03-31', '2004-12-31', '2004-09-30', '2004-06-30', '2004-03-31', '2003-12-31', - '2003-09-30', '2003-06-30', '2003-03-31', '2002-12-31', '2002-09-30', '2002-06-30', - '2002-03-31', '2001-12-31', '2001-06-30', '2000-12-31', '2000-06-30', '1999-12-31', - '1999-06-30', '1998-12-31', '1998-06-30', '1997-12-31', '1997-06-30', '1996-12-31', - '1996-06-30', '1995-12-31', '1995-06-30', '1994-12-31', '1994-06-30', '1993-12-31', - '1992-12-31', '1991-12-31', '1990-12-31', '1989-12-31'] - result = BalanceSheet.query_data(session=session, provider='eastmoney', return_type='domain', - codes=['000001'], end_timestamp='2018-12-30', - order=BalanceSheet.report_date.desc(), time_field='report_date') + correct_timestamps = [ + "2018-09-30", + "2018-06-30", + "2018-03-31", + "2017-12-31", + "2017-09-30", + "2017-06-30", + "2017-03-31", + "2016-12-31", + "2016-09-30", + "2016-06-30", + "2016-03-31", + "2015-12-31", + "2015-09-30", + "2015-06-30", + "2015-03-31", + "2014-12-31", + "2014-09-30", + "2014-06-30", + "2014-03-31", + "2013-12-31", + "2013-09-30", + "2013-06-30", + "2013-03-31", + "2012-12-31", + "2012-09-30", + "2012-06-30", + "2012-03-31", + "2011-12-31", + "2011-09-30", + "2011-06-30", + "2011-03-31", + "2010-12-31", + "2010-09-30", + "2010-06-30", + "2010-03-31", + "2009-12-31", + "2009-09-30", + "2009-06-30", + "2009-03-31", + "2008-12-31", + "2008-09-30", + "2008-06-30", + "2008-03-31", + "2007-12-31", + "2007-09-30", + "2007-06-30", + "2007-03-31", + "2006-12-31", + "2006-09-30", + "2006-06-30", + "2006-03-31", + "2005-12-31", + "2005-09-30", + "2005-06-30", + "2005-03-31", + "2004-12-31", + "2004-09-30", + "2004-06-30", + 
"2004-03-31", + "2003-12-31", + "2003-09-30", + "2003-06-30", + "2003-03-31", + "2002-12-31", + "2002-09-30", + "2002-06-30", + "2002-03-31", + "2001-12-31", + "2001-06-30", + "2000-12-31", + "2000-06-30", + "1999-12-31", + "1999-06-30", + "1998-12-31", + "1998-06-30", + "1997-12-31", + "1997-06-30", + "1996-12-31", + "1996-06-30", + "1995-12-31", + "1995-06-30", + "1994-12-31", + "1994-06-30", + "1993-12-31", + "1992-12-31", + "1991-12-31", + "1990-12-31", + "1989-12-31", + ] + result = BalanceSheet.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000001"], + end_timestamp="2018-12-30", + order=BalanceSheet.report_date.desc(), + time_field="report_date", + ) assert len(correct_timestamps) == len(result) timestamps = [to_time_str(item.report_date) for item in result] assert set(correct_timestamps) == set(timestamps) @@ -142,25 +306,108 @@ def test_000001_balance_sheet(): # 银行利润表 def test_000001_income_statement(): - correct_timestamps = ['2018-09-30', '2018-06-30', '2018-03-31', '2017-12-31', '2017-09-30', - '2017-06-30', '2017-03-31', '2016-12-31', '2016-09-30', '2016-06-30', '2016-03-31', - '2015-12-31', '2015-09-30', '2015-06-30', '2015-03-31', '2014-12-31', '2014-09-30', - '2014-06-30', '2014-03-31', '2013-12-31', '2013-09-30', '2013-06-30', '2013-03-31', - '2012-12-31', '2012-09-30', '2012-06-30', '2012-03-31', '2011-12-31', '2011-09-30', - '2011-06-30', '2011-03-31', '2010-12-31', '2010-09-30', '2010-06-30', '2010-03-31', - '2009-12-31', '2009-09-30', '2009-06-30', '2009-03-31', '2008-12-31', '2008-09-30', - '2008-06-30', '2008-03-31', '2007-12-31', '2007-09-30', '2007-06-30', '2007-03-31', - '2006-12-31', '2006-09-30', '2006-06-30', '2006-03-31', '2005-12-31', '2005-09-30', - '2005-06-30', '2005-03-31', '2004-12-31', '2004-09-30', '2004-06-30', '2004-03-31', - '2003-12-31', '2003-09-30', '2003-06-30', '2003-03-31', '2002-12-31', '2002-09-30', - '2002-06-30', '2002-03-31', '2001-12-31', '2001-09-30', '2001-06-30', 
'2001-03-31', - '2000-12-31', '2000-06-30', '1999-12-31', '1999-06-30', '1998-12-31', '1998-06-30', - '1997-12-31', '1997-06-30', '1996-12-31', '1996-06-30', '1995-12-31', '1995-06-30', - '1994-12-31', '1994-06-30', '1993-12-31', '1993-06-30', '1992-12-31', '1991-12-31', - '1990-12-31', '1989-12-31'] - result = IncomeStatement.query_data(session=session, provider='eastmoney', return_type='domain', - codes=['000001'], end_timestamp='2018-12-30', - order=IncomeStatement.report_date.desc(), time_field='report_date') + correct_timestamps = [ + "2018-09-30", + "2018-06-30", + "2018-03-31", + "2017-12-31", + "2017-09-30", + "2017-06-30", + "2017-03-31", + "2016-12-31", + "2016-09-30", + "2016-06-30", + "2016-03-31", + "2015-12-31", + "2015-09-30", + "2015-06-30", + "2015-03-31", + "2014-12-31", + "2014-09-30", + "2014-06-30", + "2014-03-31", + "2013-12-31", + "2013-09-30", + "2013-06-30", + "2013-03-31", + "2012-12-31", + "2012-09-30", + "2012-06-30", + "2012-03-31", + "2011-12-31", + "2011-09-30", + "2011-06-30", + "2011-03-31", + "2010-12-31", + "2010-09-30", + "2010-06-30", + "2010-03-31", + "2009-12-31", + "2009-09-30", + "2009-06-30", + "2009-03-31", + "2008-12-31", + "2008-09-30", + "2008-06-30", + "2008-03-31", + "2007-12-31", + "2007-09-30", + "2007-06-30", + "2007-03-31", + "2006-12-31", + "2006-09-30", + "2006-06-30", + "2006-03-31", + "2005-12-31", + "2005-09-30", + "2005-06-30", + "2005-03-31", + "2004-12-31", + "2004-09-30", + "2004-06-30", + "2004-03-31", + "2003-12-31", + "2003-09-30", + "2003-06-30", + "2003-03-31", + "2002-12-31", + "2002-09-30", + "2002-06-30", + "2002-03-31", + "2001-12-31", + "2001-09-30", + "2001-06-30", + "2001-03-31", + "2000-12-31", + "2000-06-30", + "1999-12-31", + "1999-06-30", + "1998-12-31", + "1998-06-30", + "1997-12-31", + "1997-06-30", + "1996-12-31", + "1996-06-30", + "1995-12-31", + "1995-06-30", + "1994-12-31", + "1994-06-30", + "1993-12-31", + "1993-06-30", + "1992-12-31", + "1991-12-31", + "1990-12-31", + "1989-12-31", 
+ ] + result = IncomeStatement.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000001"], + end_timestamp="2018-12-30", + order=IncomeStatement.report_date.desc(), + time_field="report_date", + ) assert len(correct_timestamps) == len(result) timestamps = [to_time_str(item.report_date) for item in result] assert set(correct_timestamps) == set(timestamps) @@ -191,22 +438,90 @@ def test_000001_income_statement(): # 银行现金流量表 def test_000001_cash_flow_statement(): - correct_timestamps = ['2018-09-30', '2018-06-30', '2018-03-31', '2017-12-31', '2017-09-30', - '2017-06-30', '2017-03-31', '2016-12-31', '2016-09-30', '2016-06-30', '2016-03-31', - '2015-12-31', '2015-09-30', '2015-06-30', '2015-03-31', '2014-12-31', '2014-09-30', - '2014-06-30', '2014-03-31', '2013-12-31', '2013-09-30', '2013-06-30', '2013-03-31', - '2012-12-31', '2012-09-30', '2012-06-30', '2012-03-31', '2011-12-31', '2011-09-30', - '2011-06-30', '2011-03-31', '2010-12-31', '2010-09-30', '2010-06-30', '2010-03-31', - '2009-12-31', '2009-09-30', '2009-06-30', '2009-03-31', '2008-12-31', '2008-09-30', - '2008-06-30', '2008-03-31', '2007-12-31', '2007-09-30', '2007-06-30', '2007-03-31', - '2006-12-31', '2006-09-30', '2006-06-30', '2006-03-31', '2005-12-31', '2005-09-30', - '2005-06-30', '2005-03-31', '2004-12-31', '2004-09-30', '2004-06-30', '2004-03-31', - '2003-12-31', '2003-09-30', '2003-06-30', '2003-03-31', '2002-12-31', '2002-06-30', - '2001-12-31', '2001-06-30', '2000-12-31', '2000-06-30', '1999-12-31', '1999-06-30', - '1998-12-31', '1998-06-30'] - result = CashFlowStatement.query_data(session=session, provider='eastmoney', return_type='domain', - codes=['000001'], end_timestamp='2018-12-30', - order=CashFlowStatement.report_date.desc(), time_field='report_date') + correct_timestamps = [ + "2018-09-30", + "2018-06-30", + "2018-03-31", + "2017-12-31", + "2017-09-30", + "2017-06-30", + "2017-03-31", + "2016-12-31", + "2016-09-30", + "2016-06-30", + "2016-03-31", + 
"2015-12-31", + "2015-09-30", + "2015-06-30", + "2015-03-31", + "2014-12-31", + "2014-09-30", + "2014-06-30", + "2014-03-31", + "2013-12-31", + "2013-09-30", + "2013-06-30", + "2013-03-31", + "2012-12-31", + "2012-09-30", + "2012-06-30", + "2012-03-31", + "2011-12-31", + "2011-09-30", + "2011-06-30", + "2011-03-31", + "2010-12-31", + "2010-09-30", + "2010-06-30", + "2010-03-31", + "2009-12-31", + "2009-09-30", + "2009-06-30", + "2009-03-31", + "2008-12-31", + "2008-09-30", + "2008-06-30", + "2008-03-31", + "2007-12-31", + "2007-09-30", + "2007-06-30", + "2007-03-31", + "2006-12-31", + "2006-09-30", + "2006-06-30", + "2006-03-31", + "2005-12-31", + "2005-09-30", + "2005-06-30", + "2005-03-31", + "2004-12-31", + "2004-09-30", + "2004-06-30", + "2004-03-31", + "2003-12-31", + "2003-09-30", + "2003-06-30", + "2003-03-31", + "2002-12-31", + "2002-06-30", + "2001-12-31", + "2001-06-30", + "2000-12-31", + "2000-06-30", + "1999-12-31", + "1999-06-30", + "1998-12-31", + "1998-06-30", + ] + result = CashFlowStatement.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000001"], + end_timestamp="2018-12-30", + order=CashFlowStatement.report_date.desc(), + time_field="report_date", + ) assert len(correct_timestamps) == len(result) timestamps = [to_time_str(item.report_date) for item in result] assert set(correct_timestamps) == set(timestamps) @@ -257,23 +572,97 @@ def test_000001_cash_flow_statement(): # 企业指标 def test_000778_finance_factor(): - correct_timestamps = ['2018-09-30', '2018-06-30', '2018-03-31', '2017-12-31', '2017-09-30', '2017-06-30', - '2017-03-31', '2016-12-31', '2016-09-30', '2016-06-30', '2016-03-31', '2015-12-31', - '2015-09-30', '2015-06-30', '2015-03-31', '2014-12-31', '2014-09-30', '2014-06-30', - '2014-03-31', '2013-12-31', '2013-09-30', '2013-06-30', '2013-03-31', '2012-12-31', - '2012-09-30', '2012-06-30', '2012-03-31', '2011-12-31', '2011-09-30', '2011-06-30', - '2011-03-31', '2010-12-31', '2010-09-30', 
'2010-06-30', '2010-03-31', '2009-12-31', - '2009-09-30', '2009-06-30', '2009-03-31', '2008-12-31', '2008-09-30', '2008-06-30', - '2008-03-31', '2007-12-31', '2007-09-30', '2007-06-30', '2007-03-31', '2006-12-31', - '2006-09-30', '2006-06-30', '2006-03-31', '2005-12-31', '2005-09-30', '2005-06-30', - '2005-03-31', '2004-12-31', '2004-09-30', '2004-06-30', '2004-03-31', '2003-12-31', - '2003-09-30', '2003-06-30', '2003-03-31', '2002-12-31', '2002-09-30', '2002-06-30', - '2002-03-31', '2001-12-31', '2001-06-30', '2000-12-31', '2000-06-30', '1999-12-31', - '1999-06-30', '1998-12-31', '1998-06-30', '1997-12-31', '1997-06-30', '1996-12-31', - '1995-12-31', '1994-12-31'] - result = FinanceFactor.query_data(session=session, provider='eastmoney', return_type='domain', - codes=['000778'], end_timestamp='2018-12-30', - order=FinanceFactor.report_date.desc(), time_field='report_date') + correct_timestamps = [ + "2018-09-30", + "2018-06-30", + "2018-03-31", + "2017-12-31", + "2017-09-30", + "2017-06-30", + "2017-03-31", + "2016-12-31", + "2016-09-30", + "2016-06-30", + "2016-03-31", + "2015-12-31", + "2015-09-30", + "2015-06-30", + "2015-03-31", + "2014-12-31", + "2014-09-30", + "2014-06-30", + "2014-03-31", + "2013-12-31", + "2013-09-30", + "2013-06-30", + "2013-03-31", + "2012-12-31", + "2012-09-30", + "2012-06-30", + "2012-03-31", + "2011-12-31", + "2011-09-30", + "2011-06-30", + "2011-03-31", + "2010-12-31", + "2010-09-30", + "2010-06-30", + "2010-03-31", + "2009-12-31", + "2009-09-30", + "2009-06-30", + "2009-03-31", + "2008-12-31", + "2008-09-30", + "2008-06-30", + "2008-03-31", + "2007-12-31", + "2007-09-30", + "2007-06-30", + "2007-03-31", + "2006-12-31", + "2006-09-30", + "2006-06-30", + "2006-03-31", + "2005-12-31", + "2005-09-30", + "2005-06-30", + "2005-03-31", + "2004-12-31", + "2004-09-30", + "2004-06-30", + "2004-03-31", + "2003-12-31", + "2003-09-30", + "2003-06-30", + "2003-03-31", + "2002-12-31", + "2002-09-30", + "2002-06-30", + "2002-03-31", + "2001-12-31", 
+ "2001-06-30", + "2000-12-31", + "2000-06-30", + "1999-12-31", + "1999-06-30", + "1998-12-31", + "1998-06-30", + "1997-12-31", + "1997-06-30", + "1996-12-31", + "1995-12-31", + "1994-12-31", + ] + result = FinanceFactor.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000778"], + end_timestamp="2018-12-30", + order=FinanceFactor.report_date.desc(), + time_field="report_date", + ) assert len(correct_timestamps) == len(result) timestamps = [to_time_str(item.report_date) for item in result] assert set(correct_timestamps) == set(timestamps) @@ -323,23 +712,97 @@ def test_000778_finance_factor(): # 企业资产负债表 def test_000778_balance_sheet(): - correct_timestamps = ['2018-09-30', '2018-06-30', '2018-03-31', '2017-12-31', '2017-09-30', '2017-06-30', - '2017-03-31', '2016-12-31', '2016-09-30', '2016-06-30', '2016-03-31', '2015-12-31', - '2015-09-30', '2015-06-30', '2015-03-31', '2014-12-31', '2014-09-30', '2014-06-30', - '2014-03-31', '2013-12-31', '2013-09-30', '2013-06-30', '2013-03-31', '2012-12-31', - '2012-09-30', '2012-06-30', '2012-03-31', '2011-12-31', '2011-09-30', '2011-06-30', - '2011-03-31', '2010-12-31', '2010-09-30', '2010-06-30', '2010-03-31', '2009-12-31', - '2009-09-30', '2009-06-30', '2009-03-31', '2008-12-31', '2008-09-30', '2008-06-30', - '2008-03-31', '2007-12-31', '2007-09-30', '2007-06-30', '2007-03-31', '2006-12-31', - '2006-09-30', '2006-06-30', '2006-03-31', '2005-12-31', '2005-09-30', '2005-06-30', - '2005-03-31', '2004-12-31', '2004-09-30', '2004-06-30', '2004-03-31', '2003-12-31', - '2003-09-30', '2003-06-30', '2003-03-31', '2002-12-31', '2002-09-30', '2002-06-30', - '2002-03-31', '2001-12-31', '2001-06-30', '2000-12-31', '2000-06-30', '1999-12-31', - '1999-06-30', '1998-12-31', '1998-06-30', '1997-12-31', '1997-06-30', '1996-12-31', - '1995-12-31', '1994-12-31'] - result = BalanceSheet.query_data(session=session, provider='eastmoney', return_type='domain', - codes=['000778'], end_timestamp='2018-12-30', - 
order=BalanceSheet.report_date.desc(), time_field='report_date') + correct_timestamps = [ + "2018-09-30", + "2018-06-30", + "2018-03-31", + "2017-12-31", + "2017-09-30", + "2017-06-30", + "2017-03-31", + "2016-12-31", + "2016-09-30", + "2016-06-30", + "2016-03-31", + "2015-12-31", + "2015-09-30", + "2015-06-30", + "2015-03-31", + "2014-12-31", + "2014-09-30", + "2014-06-30", + "2014-03-31", + "2013-12-31", + "2013-09-30", + "2013-06-30", + "2013-03-31", + "2012-12-31", + "2012-09-30", + "2012-06-30", + "2012-03-31", + "2011-12-31", + "2011-09-30", + "2011-06-30", + "2011-03-31", + "2010-12-31", + "2010-09-30", + "2010-06-30", + "2010-03-31", + "2009-12-31", + "2009-09-30", + "2009-06-30", + "2009-03-31", + "2008-12-31", + "2008-09-30", + "2008-06-30", + "2008-03-31", + "2007-12-31", + "2007-09-30", + "2007-06-30", + "2007-03-31", + "2006-12-31", + "2006-09-30", + "2006-06-30", + "2006-03-31", + "2005-12-31", + "2005-09-30", + "2005-06-30", + "2005-03-31", + "2004-12-31", + "2004-09-30", + "2004-06-30", + "2004-03-31", + "2003-12-31", + "2003-09-30", + "2003-06-30", + "2003-03-31", + "2002-12-31", + "2002-09-30", + "2002-06-30", + "2002-03-31", + "2001-12-31", + "2001-06-30", + "2000-12-31", + "2000-06-30", + "1999-12-31", + "1999-06-30", + "1998-12-31", + "1998-06-30", + "1997-12-31", + "1997-06-30", + "1996-12-31", + "1995-12-31", + "1994-12-31", + ] + result = BalanceSheet.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000778"], + end_timestamp="2018-12-30", + order=BalanceSheet.report_date.desc(), + time_field="report_date", + ) assert len(correct_timestamps) == len(result) timestamps = [to_time_str(item.report_date) for item in result] assert set(correct_timestamps) == set(timestamps) @@ -400,23 +863,97 @@ def test_000778_balance_sheet(): # 企业利润表 def test_000778_income_statement(): - correct_timestamps = ['2018-09-30', '2018-06-30', '2018-03-31', '2017-12-31', '2017-09-30', '2017-06-30', - '2017-03-31', '2016-12-31', 
'2016-09-30', '2016-06-30', '2016-03-31', '2015-12-31', - '2015-09-30', '2015-06-30', '2015-03-31', '2014-12-31', '2014-09-30', '2014-06-30', - '2014-03-31', '2013-12-31', '2013-09-30', '2013-06-30', '2013-03-31', '2012-12-31', - '2012-09-30', '2012-06-30', '2012-03-31', '2011-12-31', '2011-09-30', '2011-06-30', - '2011-03-31', '2010-12-31', '2010-09-30', '2010-06-30', '2010-03-31', '2009-12-31', - '2009-09-30', '2009-06-30', '2009-03-31', '2008-12-31', '2008-09-30', '2008-06-30', - '2008-03-31', '2007-12-31', '2007-09-30', '2007-06-30', '2007-03-31', '2006-12-31', - '2006-09-30', '2006-06-30', '2006-03-31', '2005-12-31', '2005-09-30', '2005-06-30', - '2005-03-31', '2004-12-31', '2004-09-30', '2004-06-30', '2004-03-31', '2003-12-31', - '2003-09-30', '2003-06-30', '2003-03-31', '2002-12-31', '2002-09-30', '2002-06-30', - '2002-03-31', '2001-12-31', '2001-06-30', '2000-12-31', '2000-06-30', '1999-12-31', - '1999-06-30', '1998-12-31', '1998-06-30', '1997-12-31', '1997-06-30', '1996-12-31', - '1995-12-31', '1994-12-31'] - result = IncomeStatement.query_data(session=session, provider='eastmoney', return_type='domain', - codes=['000778'], end_timestamp='2018-12-30', - order=IncomeStatement.report_date.desc(), time_field='report_date') + correct_timestamps = [ + "2018-09-30", + "2018-06-30", + "2018-03-31", + "2017-12-31", + "2017-09-30", + "2017-06-30", + "2017-03-31", + "2016-12-31", + "2016-09-30", + "2016-06-30", + "2016-03-31", + "2015-12-31", + "2015-09-30", + "2015-06-30", + "2015-03-31", + "2014-12-31", + "2014-09-30", + "2014-06-30", + "2014-03-31", + "2013-12-31", + "2013-09-30", + "2013-06-30", + "2013-03-31", + "2012-12-31", + "2012-09-30", + "2012-06-30", + "2012-03-31", + "2011-12-31", + "2011-09-30", + "2011-06-30", + "2011-03-31", + "2010-12-31", + "2010-09-30", + "2010-06-30", + "2010-03-31", + "2009-12-31", + "2009-09-30", + "2009-06-30", + "2009-03-31", + "2008-12-31", + "2008-09-30", + "2008-06-30", + "2008-03-31", + "2007-12-31", + "2007-09-30", + 
"2007-06-30", + "2007-03-31", + "2006-12-31", + "2006-09-30", + "2006-06-30", + "2006-03-31", + "2005-12-31", + "2005-09-30", + "2005-06-30", + "2005-03-31", + "2004-12-31", + "2004-09-30", + "2004-06-30", + "2004-03-31", + "2003-12-31", + "2003-09-30", + "2003-06-30", + "2003-03-31", + "2002-12-31", + "2002-09-30", + "2002-06-30", + "2002-03-31", + "2001-12-31", + "2001-06-30", + "2000-12-31", + "2000-06-30", + "1999-12-31", + "1999-06-30", + "1998-12-31", + "1998-06-30", + "1997-12-31", + "1997-06-30", + "1996-12-31", + "1995-12-31", + "1994-12-31", + ] + result = IncomeStatement.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000778"], + end_timestamp="2018-12-30", + order=IncomeStatement.report_date.desc(), + time_field="report_date", + ) assert len(correct_timestamps) == len(result) timestamps = [to_time_str(item.report_date) for item in result] assert set(correct_timestamps) == set(timestamps) @@ -459,21 +996,89 @@ def test_000778_income_statement(): # 银行现金流量表 def test_000778_cash_flow_statement(): - correct_timestamps = ['2018-09-30', '2018-06-30', '2018-03-31', '2017-12-31', '2017-09-30', '2017-06-30', - '2017-03-31', '2016-12-31', '2016-09-30', '2016-06-30', '2016-03-31', '2015-12-31', - '2015-09-30', '2015-06-30', '2015-03-31', '2014-12-31', '2014-09-30', '2014-06-30', - '2014-03-31', '2013-12-31', '2013-09-30', '2013-06-30', '2013-03-31', '2012-12-31', - '2012-09-30', '2012-06-30', '2012-03-31', '2011-12-31', '2011-09-30', '2011-06-30', - '2011-03-31', '2010-12-31', '2010-09-30', '2010-06-30', '2010-03-31', '2009-12-31', - '2009-09-30', '2009-06-30', '2009-03-31', '2008-12-31', '2008-09-30', '2008-06-30', - '2008-03-31', '2007-12-31', '2007-09-30', '2007-06-30', '2007-03-31', '2006-12-31', - '2006-09-30', '2006-06-30', '2006-03-31', '2005-12-31', '2005-09-30', '2005-06-30', - '2005-03-31', '2004-12-31', '2004-09-30', '2004-06-30', '2004-03-31', '2003-12-31', - '2003-09-30', '2003-06-30', '2003-03-31', 
'2002-12-31', '2002-06-30', '2001-12-31', - '2001-06-30', '2000-12-31', '2000-06-30', '1999-12-31', '1998-12-31', '1998-06-30'] - result = CashFlowStatement.query_data(session=session, provider='eastmoney', return_type='domain', - codes=['000778'], end_timestamp='2018-12-30', - order=CashFlowStatement.report_date.desc(), time_field='report_date') + correct_timestamps = [ + "2018-09-30", + "2018-06-30", + "2018-03-31", + "2017-12-31", + "2017-09-30", + "2017-06-30", + "2017-03-31", + "2016-12-31", + "2016-09-30", + "2016-06-30", + "2016-03-31", + "2015-12-31", + "2015-09-30", + "2015-06-30", + "2015-03-31", + "2014-12-31", + "2014-09-30", + "2014-06-30", + "2014-03-31", + "2013-12-31", + "2013-09-30", + "2013-06-30", + "2013-03-31", + "2012-12-31", + "2012-09-30", + "2012-06-30", + "2012-03-31", + "2011-12-31", + "2011-09-30", + "2011-06-30", + "2011-03-31", + "2010-12-31", + "2010-09-30", + "2010-06-30", + "2010-03-31", + "2009-12-31", + "2009-09-30", + "2009-06-30", + "2009-03-31", + "2008-12-31", + "2008-09-30", + "2008-06-30", + "2008-03-31", + "2007-12-31", + "2007-09-30", + "2007-06-30", + "2007-03-31", + "2006-12-31", + "2006-09-30", + "2006-06-30", + "2006-03-31", + "2005-12-31", + "2005-09-30", + "2005-06-30", + "2005-03-31", + "2004-12-31", + "2004-09-30", + "2004-06-30", + "2004-03-31", + "2003-12-31", + "2003-09-30", + "2003-06-30", + "2003-03-31", + "2002-12-31", + "2002-06-30", + "2001-12-31", + "2001-06-30", + "2000-12-31", + "2000-06-30", + "1999-12-31", + "1998-12-31", + "1998-06-30", + ] + result = CashFlowStatement.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000778"], + end_timestamp="2018-12-30", + order=CashFlowStatement.report_date.desc(), + time_field="report_date", + ) assert len(correct_timestamps) == len(result) timestamps = [to_time_str(item.report_date) for item in result] assert set(correct_timestamps) == set(timestamps) diff --git a/tests/api/test_holder.py b/tests/api/test_holder.py index 
5fa49fa4..2a75f76f 100644 --- a/tests/api/test_holder.py +++ b/tests/api/test_holder.py @@ -7,18 +7,22 @@ from zvt.domain import TopTenHolder, TopTenTradableHolder -session = get_db_session(provider='eastmoney', db_name='holder') # type: sqlalchemy.orm.Session +session = get_db_session(provider="eastmoney", db_name="holder") # type: sqlalchemy.orm.Session # 十大股东 def test_000778_top_ten_holder(): - result: List[TopTenHolder] = TopTenHolder.query_data(session=session, provider='eastmoney', - return_type='domain', - codes=['000778'], end_timestamp='2018-09-30', - start_timestamp='2018-09-30', - order=TopTenHolder.shareholding_ratio.desc()) + result: List[TopTenHolder] = TopTenHolder.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000778"], + end_timestamp="2018-09-30", + start_timestamp="2018-09-30", + order=TopTenHolder.shareholding_ratio.desc(), + ) assert len(result) == 10 - assert result[0].holder_name == '新兴际华集团有限公司' + assert result[0].holder_name == "新兴际华集团有限公司" assert result[0].shareholding_numbers == 1595000000 assert result[0].shareholding_ratio == 0.3996 assert result[0].change == 32080000 @@ -26,13 +30,17 @@ def test_000778_top_ten_holder(): def test_000778_top_ten_tradable_holder(): - result: List[TopTenHolder] = TopTenTradableHolder.query_data(session=session, provider='eastmoney', - return_type='domain', - codes=['000778'], end_timestamp='2018-09-30', - start_timestamp='2018-09-30', - order=TopTenTradableHolder.shareholding_ratio.desc()) + result: List[TopTenHolder] = TopTenTradableHolder.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000778"], + end_timestamp="2018-09-30", + start_timestamp="2018-09-30", + order=TopTenTradableHolder.shareholding_ratio.desc(), + ) assert len(result) == 10 - assert result[0].holder_name == '新兴际华集团有限公司' + assert result[0].holder_name == "新兴际华集团有限公司" assert result[0].shareholding_numbers == 1525000000 assert result[0].shareholding_ratio == 
0.389 assert result[0].change == 38560000 diff --git a/tests/api/test_intent.py b/tests/api/test_intent.py new file mode 100644 index 00000000..999befcb --- /dev/null +++ b/tests/api/test_intent.py @@ -0,0 +1,74 @@ +# -*- coding: utf-8 -*- +from zvt.api.intent import compare, distribute, composite, composite_all +from zvt.contract.drawer import ChartType +from zvt.domain import FinanceFactor, CashFlowStatement, BalanceSheet, Stock1dKdata +from zvt.utils.time_utils import to_pd_timestamp + + +def test_compare_kdata(): + entity_ids = ["stock_sz_000338", "stock_sh_601318"] + compare(entity_ids=entity_ids, scale_value=10) + compare(entity_ids=entity_ids, start_timestamp="2010-01-01") + + +def test_compare_line(): + entity_ids = ["stock_sz_000338", "stock_sh_601318"] + compare(entity_ids=entity_ids, schema_map_columns={FinanceFactor: [FinanceFactor.roe]}) + + +def test_compare_scatter(): + entity_ids = ["stock_sz_000338", "stock_sh_601318"] + compare( + entity_ids=entity_ids, schema_map_columns={FinanceFactor: [FinanceFactor.roe]}, chart_type=ChartType.scatter + ) + + +def test_compare_area(): + entity_ids = ["stock_sz_000338", "stock_sh_601318"] + compare(entity_ids=entity_ids, schema_map_columns={FinanceFactor: [FinanceFactor.roe]}, chart_type=ChartType.area) + + +def test_compare_bar(): + entity_ids = ["stock_sz_000338", "stock_sh_601318"] + compare(entity_ids=entity_ids, schema_map_columns={FinanceFactor: [FinanceFactor.roe]}, chart_type=ChartType.bar) + + +def test_distribute(): + distribute(entity_ids=None, data_schema=FinanceFactor, columns=["roe"]) + + +def test_composite(): + composite( + entity_id="stock_sz_000338", + data_schema=CashFlowStatement, + columns=[ + CashFlowStatement.net_op_cash_flows, + CashFlowStatement.net_investing_cash_flows, + CashFlowStatement.net_financing_cash_flows, + ], + filters=[ + CashFlowStatement.report_period == "year", + CashFlowStatement.report_date == to_pd_timestamp("2016-12-31"), + ], + ) + composite( + 
entity_id="stock_sz_000338", + data_schema=BalanceSheet, + columns=[ + BalanceSheet.total_current_assets, + BalanceSheet.total_non_current_assets, + BalanceSheet.total_current_liabilities, + BalanceSheet.total_non_current_liabilities, + ], + filters=[BalanceSheet.report_period == "year", BalanceSheet.report_date == to_pd_timestamp("2016-12-31")], + ) + + +def test_composite_all(): + composite_all( + provider="joinquant", + entity_ids=None, + data_schema=Stock1dKdata, + column=Stock1dKdata.turnover, + timestamp=to_pd_timestamp("2016-12-02"), + ) diff --git a/tests/api/test_joinquant_quotes.py b/tests/api/test_joinquant_quotes.py index 713bb164..bc18ef36 100644 --- a/tests/api/test_joinquant_quotes.py +++ b/tests/api/test_joinquant_quotes.py @@ -1,21 +1,21 @@ -from zvt.api.quote import get_kdata +from zvt.api.kdata import get_kdata from zvt.contract import IntervalLevel from zvt.contract.api import get_db_session from ..context import init_test_context init_test_context() -day_k_session = get_db_session(provider='joinquant', - db_name='stock_1d_kdata') # type: sqlalchemy.orm.Session +day_k_session = get_db_session(provider="joinquant", db_name="stock_1d_kdata") # type: sqlalchemy.orm.Session -day_1h_session = get_db_session(provider='joinquant', - db_name='stock_1h_kdata') # type: sqlalchemy.orm.Session +day_1h_session = get_db_session(provider="joinquant", db_name="stock_1h_kdata") # type: sqlalchemy.orm.Session def test_jq_603220_kdata(): - df = get_kdata(entity_id='stock_sh_603220', session=day_k_session, level=IntervalLevel.LEVEL_1DAY, - provider='joinquant') + df = get_kdata( + entity_id="stock_sh_603220", session=day_k_session, level=IntervalLevel.LEVEL_1DAY, provider="joinquant" + ) print(df) - df = get_kdata(entity_id='stock_sh_603220', session=day_1h_session, level=IntervalLevel.LEVEL_1HOUR, - provider='joinquant') + df = get_kdata( + entity_id="stock_sh_603220", session=day_1h_session, level=IntervalLevel.LEVEL_1HOUR, provider="joinquant" + ) print(df) diff 
--git a/tests/api/test_kdata.py b/tests/api/test_kdata.py new file mode 100644 index 00000000..bde95cc7 --- /dev/null +++ b/tests/api/test_kdata.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +from zvt.api.kdata import get_kdata +from zvt.api.kdata import get_latest_kdata_date +from zvt.contract import IntervalLevel, AdjustType + + +def test_jq_1mon_kdata(): + df = get_kdata(entity_id="stock_sz_000338", provider="joinquant", level=IntervalLevel.LEVEL_1MON) + se = df.loc["2010-01-29"] + # make sure our fq is ok + assert round(se["open"], 2) <= 5.44 + assert round(se["high"], 2) <= 6.43 + assert round(se["low"], 2) <= 5.2 + assert round(se["close"], 2) <= 5.45 + + +def test_jq_1wk_kdata(): + df = get_kdata(entity_id="stock_sz_000338", provider="joinquant", level=IntervalLevel.LEVEL_1WEEK) + print(df) + + +def test_jq_1d_kdata(): + df = get_kdata(entity_id="stock_sz_000338", provider="joinquant", level=IntervalLevel.LEVEL_1DAY) + print(df) + + se = df.loc["2019-04-08"] + # make sure our fq is ok + assert round(se["open"], 2) <= 12.86 + assert round(se["high"], 2) <= 14.16 + assert round(se["low"], 2) <= 12.86 + assert round(se["close"], 2) <= 14.08 + + +def test_jq_1d_hfq_kdata(): + df = get_kdata(entity_id="stock_sz_000338", provider="joinquant", level=IntervalLevel.LEVEL_1DAY, adjust_type="hfq") + se = df.loc["2019-04-08"] + print(se) + assert round(se["open"], 2) == 249.29 + assert round(se["high"], 2) == 273.68 + assert round(se["low"], 2) == 249.29 + assert round(se["close"], 2) == 272.18 + + +def test_get_latest_kdata_date(): + date = get_latest_kdata_date(provider="joinquant", entity_type="stock", adjust_type=AdjustType.hfq) + assert date is not None diff --git a/tests/api/test_quote.py b/tests/api/test_quote.py deleted file mode 100644 index a22156ed..00000000 --- a/tests/api/test_quote.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -from zvt.api.quote import get_kdata -from zvt.contract import IntervalLevel - - -def test_jq_1mon_kdata(): - df = 
get_kdata(entity_id='stock_sz_000338', provider='joinquant', level=IntervalLevel.LEVEL_1MON) - se = df.loc['2010-01-29'] - # make sure our fq is ok - assert round(se['open'], 2) <= 5.44 - assert round(se['high'], 2) <= 6.43 - assert round(se['low'], 2) <= 5.2 - assert round(se['close'], 2) <= 5.45 - - -def test_jq_1wk_kdata(): - df = get_kdata(entity_id='stock_sz_000338', provider='joinquant', level=IntervalLevel.LEVEL_1WEEK) - print(df) - - -def test_jq_1d_kdata(): - df = get_kdata(entity_id='stock_sz_000338', provider='joinquant', level=IntervalLevel.LEVEL_1DAY) - print(df) - - se = df.loc['2019-04-08'] - # make sure our fq is ok - assert round(se['open'], 2) <= 12.86 - assert round(se['high'], 2) <= 14.16 - assert round(se['low'], 2) <= 12.86 - assert round(se['close'], 2) <= 14.08 - - -def test_jq_1d_hfq_kdata(): - df = get_kdata(entity_id='stock_sz_000338', provider='joinquant', level=IntervalLevel.LEVEL_1DAY, adjust_type='hfq') - se = df.loc['2019-04-08'] - print(se) - assert round(se['open'], 2) == 249.29 - assert round(se['high'], 2) == 273.68 - assert round(se['low'], 2) == 249.29 - assert round(se['close'], 2) == 272.18 diff --git a/tests/api/test_technical.py b/tests/api/test_technical.py index 6efeccb5..e0b8f9df 100644 --- a/tests/api/test_technical.py +++ b/tests/api/test_technical.py @@ -5,9 +5,8 @@ init_test_context() - def test_basic_get_securities(): - items = get_entities(entity_type='stock', provider='eastmoney') + items = get_entities(entity_type="stock", provider="eastmoney") + print(items) + items = get_entities(entity_type="index", provider="exchange") print(items) - items = get_entities(entity_type='index', provider='eastmoney') - print(items) \ No newline at end of file diff --git a/tests/api/test_trading.py b/tests/api/test_trading.py index 777aa347..0a4d177d 100644 --- a/tests/api/test_trading.py +++ b/tests/api/test_trading.py @@ -7,19 +7,22 @@ from zvt.domain import HolderTrading, ManagerTrading -session = 
get_db_session(provider='eastmoney', db_name='trading') # type: sqlalchemy.orm.Session +session = get_db_session(provider="eastmoney", db_name="trading") # type: sqlalchemy.orm.Session # 股东交易 def test_000778_holder_trading(): - result: List[HolderTrading] = HolderTrading.query_data(session=session, provider='eastmoney', - return_type='domain', - codes=['000778'], - end_timestamp='2018-09-30', - start_timestamp='2018-09-30', - order=HolderTrading.holding_pct.desc()) + result: List[HolderTrading] = HolderTrading.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000778"], + end_timestamp="2018-09-30", + start_timestamp="2018-09-30", + order=HolderTrading.holding_pct.desc(), + ) assert len(result) == 6 - assert result[0].holder_name == '新兴际华集团有限公司' + assert result[0].holder_name == "新兴际华集团有限公司" assert result[0].change_pct == 0.0205 assert result[0].volume == 32080000 assert result[0].holding_pct == 0.3996 @@ -27,18 +30,21 @@ def test_000778_holder_trading(): # 高管交易 def test_000778_manager_trading(): - result: List[ManagerTrading] = ManagerTrading.query_data(session=session, provider='eastmoney', - return_type='domain', - codes=['000778'], - end_timestamp='2018-09-30', - start_timestamp='2017-09-30', - order=ManagerTrading.holding.desc()) + result: List[ManagerTrading] = ManagerTrading.query_data( + session=session, + provider="eastmoney", + return_type="domain", + codes=["000778"], + end_timestamp="2018-09-30", + start_timestamp="2017-09-30", + order=ManagerTrading.holding.desc(), + ) assert len(result) == 1 - assert result[0].trading_person == '巩国平' + assert result[0].trading_person == "巩国平" assert result[0].volume == 8400 assert result[0].price == None assert result[0].holding == 18700 - assert result[0].trading_way == '增持' - assert result[0].manager_position == '职工监事' - assert result[0].manager == '巩国平' - assert result[0].relationship_with_manager == '本人' + assert result[0].trading_way == "增持" + assert 
result[0].manager_position == "职工监事" + assert result[0].manager == "巩国平" + assert result[0].relationship_with_manager == "本人" diff --git a/tests/context.py b/tests/context.py index 13ad2903..adc0a213 100644 --- a/tests/context.py +++ b/tests/context.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- + def init_test_context(): import os import sys - sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) + sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))) diff --git a/tests/contract/__init__.py b/tests/contract/__init__.py index 7c68785e..40a96afc 100644 --- a/tests/contract/__init__.py +++ b/tests/contract/__init__.py @@ -1 +1 @@ -# -*- coding: utf-8 -*- \ No newline at end of file +# -*- coding: utf-8 -*- diff --git a/tests/contract/test_add_provider_plugin.py b/tests/contract/test_add_provider_plugin.py index 4d757278..4c6297a2 100644 --- a/tests/contract/test_add_provider_plugin.py +++ b/tests/contract/test_add_provider_plugin.py @@ -2,16 +2,14 @@ def test_add_tushare_provider(): - from zvt.contract.register import register_schema - from zvt.domain.meta.stock_meta import StockMetaBase - - register_schema(providers=['tushare'], db_name='stock_meta', - schema_base=StockMetaBase) - - from zvt.domain import Stock - Stock.query_data(provider='tushare') - try: - Stock.record_data(provider='tushare') - assert False - except Exception as e: - print(e) + pass + # register_schema(providers=["tushare"], db_name="stock_meta", schema_base=StockMetaBase) + # + # from zvt.domain import Stock + # + # Stock.query_data(provider="tushare") + # try: + # Stock.record_data(provider="tushare") + # assert False + # except Exception as e: + # print(e) diff --git a/tests/contract/test_entity.py b/tests/contract/test_entity.py index 5bd8d0a9..fdb3e266 100644 --- a/tests/contract/test_entity.py +++ b/tests/contract/test_entity.py @@ -1,61 +1,64 @@ # -*- coding: utf-8 -*- -from zvt.contract import EntityMixin, IntervalLevel +from 
zvt.contract import TradableEntity, IntervalLevel from zvt.utils.time_utils import to_pd_timestamp def test_get_1min_timestamps(): timestamps = [] - for timestamp in EntityMixin.get_interval_timestamps(start_date='2020-06-17', end_date='2020-06-18', - level=IntervalLevel.LEVEL_1MIN): + for timestamp in TradableEntity.get_interval_timestamps( + start_date="2020-06-17", end_date="2020-06-18", level=IntervalLevel.LEVEL_1MIN + ): timestamps.append(timestamp) - assert to_pd_timestamp('2020-06-17 09:31:00') in timestamps - assert to_pd_timestamp('2020-06-17 11:30:00') in timestamps - assert to_pd_timestamp('2020-06-17 13:01:00') in timestamps - assert to_pd_timestamp('2020-06-17 15:00:00') in timestamps + assert to_pd_timestamp("2020-06-17 09:31:00") in timestamps + assert to_pd_timestamp("2020-06-17 11:30:00") in timestamps + assert to_pd_timestamp("2020-06-17 13:01:00") in timestamps + assert to_pd_timestamp("2020-06-17 15:00:00") in timestamps - assert to_pd_timestamp('2020-06-17 09:31:00') in timestamps - assert to_pd_timestamp('2020-06-17 11:30:00') in timestamps - assert to_pd_timestamp('2020-06-17 13:01:00') in timestamps - assert to_pd_timestamp('2020-06-18 15:00:00') in timestamps + assert to_pd_timestamp("2020-06-17 09:31:00") in timestamps + assert to_pd_timestamp("2020-06-17 11:30:00") in timestamps + assert to_pd_timestamp("2020-06-17 13:01:00") in timestamps + assert to_pd_timestamp("2020-06-18 15:00:00") in timestamps def test_get_1h_timestamps(): timestamps = [] - for timestamp in EntityMixin.get_interval_timestamps(start_date='2020-06-17', end_date='2020-06-18', - level=IntervalLevel.LEVEL_1HOUR): + for timestamp in TradableEntity.get_interval_timestamps( + start_date="2020-06-17", end_date="2020-06-18", level=IntervalLevel.LEVEL_1HOUR + ): timestamps.append(timestamp) - assert to_pd_timestamp('2020-06-17 10:30:00') in timestamps - assert to_pd_timestamp('2020-06-17 11:30:00') in timestamps - assert to_pd_timestamp('2020-06-17 14:00:00') in timestamps - 
assert to_pd_timestamp('2020-06-17 15:00:00') in timestamps + assert to_pd_timestamp("2020-06-17 10:30:00") in timestamps + assert to_pd_timestamp("2020-06-17 11:30:00") in timestamps + assert to_pd_timestamp("2020-06-17 14:00:00") in timestamps + assert to_pd_timestamp("2020-06-17 15:00:00") in timestamps - assert to_pd_timestamp('2020-06-17 10:30:00') in timestamps - assert to_pd_timestamp('2020-06-17 11:30:00') in timestamps - assert to_pd_timestamp('2020-06-17 14:00:00') in timestamps - assert to_pd_timestamp('2020-06-18 15:00:00') in timestamps + assert to_pd_timestamp("2020-06-17 10:30:00") in timestamps + assert to_pd_timestamp("2020-06-17 11:30:00") in timestamps + assert to_pd_timestamp("2020-06-17 14:00:00") in timestamps + assert to_pd_timestamp("2020-06-18 15:00:00") in timestamps def test_is_finished_kdata_timestamp(): - assert EntityMixin.is_finished_kdata_timestamp('2020-06-17 10:30', IntervalLevel.LEVEL_30MIN) - assert not EntityMixin.is_finished_kdata_timestamp('2020-06-17 10:30', IntervalLevel.LEVEL_1DAY) + assert TradableEntity.is_finished_kdata_timestamp("2020-06-17 10:30", IntervalLevel.LEVEL_30MIN) + assert not TradableEntity.is_finished_kdata_timestamp("2020-06-17 10:30", IntervalLevel.LEVEL_1DAY) - assert EntityMixin.is_finished_kdata_timestamp('2020-06-17 11:30', IntervalLevel.LEVEL_30MIN) - assert not EntityMixin.is_finished_kdata_timestamp('2020-06-17 11:30', IntervalLevel.LEVEL_1DAY) + assert TradableEntity.is_finished_kdata_timestamp("2020-06-17 11:30", IntervalLevel.LEVEL_30MIN) + assert not TradableEntity.is_finished_kdata_timestamp("2020-06-17 11:30", IntervalLevel.LEVEL_1DAY) - assert EntityMixin.is_finished_kdata_timestamp('2020-06-17 13:30', IntervalLevel.LEVEL_30MIN) - assert not EntityMixin.is_finished_kdata_timestamp('2020-06-17 13:30', IntervalLevel.LEVEL_1DAY) + assert TradableEntity.is_finished_kdata_timestamp("2020-06-17 13:30", IntervalLevel.LEVEL_30MIN) + assert not TradableEntity.is_finished_kdata_timestamp("2020-06-17 
13:30", IntervalLevel.LEVEL_1DAY) def test_open_close(): - assert EntityMixin.is_open_timestamp('2020-06-17 09:30') - assert EntityMixin.is_close_timestamp('2020-06-17 15:00') + assert TradableEntity.is_open_timestamp("2020-06-17 09:30") + assert TradableEntity.is_close_timestamp("2020-06-17 15:00") timestamps = [] - for timestamp in EntityMixin.get_interval_timestamps(start_date='2020-06-17', end_date='2020-06-18', - level=IntervalLevel.LEVEL_1HOUR): + for timestamp in TradableEntity.get_interval_timestamps( + start_date="2020-06-17", end_date="2020-06-18", level=IntervalLevel.LEVEL_1HOUR + ): timestamps.append(timestamp) - assert EntityMixin.is_open_timestamp(timestamps[0]) - assert EntityMixin.is_close_timestamp(timestamps[-1]) + assert TradableEntity.is_open_timestamp(timestamps[0]) + assert TradableEntity.is_close_timestamp(timestamps[-1]) diff --git a/tests/contract/test_reader.py b/tests/contract/test_reader.py index b33e913d..ba30c699 100644 --- a/tests/contract/test_reader.py +++ b/tests/contract/test_reader.py @@ -14,42 +14,53 @@ def test_china_stock_reader(): - data_reader = DataReader(codes=['002572', '000338'], data_schema=Stock1dKdata, entity_schema=Stock, - start_timestamp='2019-01-01', - end_timestamp='2019-06-10', entity_provider='eastmoney') + data_reader = DataReader( + provider="joinquant", + data_schema=Stock1dKdata, + entity_schema=Stock, + entity_provider="eastmoney", + codes=["002572", "000338"], + start_timestamp="2019-01-01", + end_timestamp="2019-06-10", + ) categories = data_reader.data_df.index.levels[0].to_list() df = data_reader.data_df - assert 'stock_sz_002572' in categories - assert 'stock_sz_000338' in categories + assert "stock_sz_002572" in categories + assert "stock_sz_000338" in categories - assert ('stock_sz_002572', '2019-01-02') in df.index - assert ('stock_sz_000338', '2019-01-02') in df.index - assert ('stock_sz_002572', '2019-06-10') in df.index - assert ('stock_sz_000338', '2019-06-10') in df.index + assert 
("stock_sz_002572", "2019-01-02") in df.index + assert ("stock_sz_000338", "2019-01-02") in df.index + assert ("stock_sz_002572", "2019-06-10") in df.index + assert ("stock_sz_000338", "2019-06-10") in df.index - for timestamp in Stock.get_interval_timestamps(start_date='2019-06-11', - end_date='2019-06-14', - level=IntervalLevel.LEVEL_1DAY): + for timestamp in Stock.get_interval_timestamps( + start_date="2019-06-11", end_date="2019-06-14", level=IntervalLevel.LEVEL_1DAY + ): data_reader.move_on(to_timestamp=timestamp) df = data_reader.data_df - assert ('stock_sz_002572', timestamp) in df.index - assert ('stock_sz_000338', to_time_str(timestamp)) in df.index + assert ("stock_sz_002572", timestamp) in df.index + assert ("stock_sz_000338", to_time_str(timestamp)) in df.index def test_reader_move_on(): - data_reader = DataReader(codes=['002572', '000338'], data_schema=Stock1dKdata, entity_schema=Stock, - start_timestamp='2019-06-13', - end_timestamp='2019-06-14', entity_provider='eastmoney') - - data_reader.move_on(to_timestamp='2019-06-15') - assert ('stock_sz_002572', '2019-06-15') not in data_reader.data_df.index - assert ('stock_sz_000338', '2019-06-15') not in data_reader.data_df.index + data_reader = DataReader( + data_schema=Stock1dKdata, + entity_schema=Stock, + entity_provider="eastmoney", + codes=["002572", "000338"], + start_timestamp="2019-06-13", + end_timestamp="2019-06-14", + ) + + data_reader.move_on(to_timestamp="2019-06-15") + assert ("stock_sz_002572", "2019-06-15") not in data_reader.data_df.index + assert ("stock_sz_000338", "2019-06-15") not in data_reader.data_df.index start_time = time.time() - data_reader.move_on(to_timestamp='2019-06-20', timeout=5) + data_reader.move_on(to_timestamp="2019-06-20", timeout=5) assert time.time() - start_time < 5 diff --git a/tests/contract/test_schema.py b/tests/contract/test_schema.py new file mode 100644 index 00000000..b0b6caef --- /dev/null +++ b/tests/contract/test_schema.py @@ -0,0 +1,52 @@ +# -*- coding: 
utf-8 -*- +from zvt.domain import Stock, Stockhk + + +def test_stock_trading_time(): + assert Stock.in_real_trading_time(timestamp="2024-09-02 08:00") is False + assert Stock.in_real_trading_time(timestamp="2024-09-02 09:20") is True + assert Stock.in_real_trading_time(timestamp="2024-09-02 09:30") is True + assert Stock.in_real_trading_time(timestamp="2024-09-02 11:00") is True + assert Stock.in_real_trading_time(timestamp="2024-09-02 11:30") is True + assert Stock.in_real_trading_time(timestamp="2024-09-02 11:40") is False + assert Stock.in_real_trading_time(timestamp="2024-09-02 13:00") is True + assert Stock.in_real_trading_time(timestamp="2024-09-02 15:00") is True + assert Stock.in_real_trading_time(timestamp="2024-09-02 15:10") is False + assert Stock.in_real_trading_time(timestamp="2024-09-02 16:10") is False + + assert Stock.in_trading_time(timestamp="2024-09-02 08:00") is False + assert Stock.in_trading_time(timestamp="2024-09-02 09:20") is True + assert Stock.in_trading_time(timestamp="2024-09-02 09:30") is True + assert Stock.in_trading_time(timestamp="2024-09-02 11:00") is True + assert Stock.in_trading_time(timestamp="2024-09-02 11:30") is True + assert Stock.in_trading_time(timestamp="2024-09-02 11:40") is True + assert Stock.in_trading_time(timestamp="2024-09-02 13:00") is True + assert Stock.in_trading_time(timestamp="2024-09-02 15:00") is True + assert Stock.in_trading_time(timestamp="2024-09-02 15:10") is False + assert Stock.in_trading_time(timestamp="2024-09-02 16:10") is False + + +def test_stock_hk_trading_time(): + assert Stockhk.in_real_trading_time(timestamp="2024-09-02 08:00") is False + assert Stockhk.in_real_trading_time(timestamp="2024-09-02 09:15") is True + assert Stockhk.in_real_trading_time(timestamp="2024-09-02 09:30") is True + assert Stockhk.in_real_trading_time(timestamp="2024-09-02 11:00") is True + assert Stockhk.in_real_trading_time(timestamp="2024-09-02 12:00") is True + assert 
Stockhk.in_real_trading_time(timestamp="2024-09-02 12:40") is False + assert Stockhk.in_real_trading_time(timestamp="2024-09-02 13:00") is True + assert Stockhk.in_real_trading_time(timestamp="2024-09-02 15:00") is True + assert Stockhk.in_real_trading_time(timestamp="2024-09-02 16:10") is False + assert Stockhk.in_real_trading_time(timestamp="2024-09-02 17:10") is False + + assert Stockhk.in_trading_time(timestamp="2024-09-02 08:00") is False + assert Stockhk.in_trading_time(timestamp="2024-09-02 09:20") is True + assert Stockhk.in_trading_time(timestamp="2024-09-02 09:30") is True + assert Stockhk.in_trading_time(timestamp="2024-09-02 11:00") is True + assert Stockhk.in_trading_time(timestamp="2024-09-02 11:30") is True + assert Stockhk.in_trading_time(timestamp="2024-09-02 11:40") is True + assert Stockhk.in_trading_time(timestamp="2024-09-02 12:00") is True + assert Stockhk.in_trading_time(timestamp="2024-09-02 13:00") is True + assert Stockhk.in_trading_time(timestamp="2024-09-02 15:00") is True + assert Stockhk.in_trading_time(timestamp="2024-09-02 16:00") is True + assert Stockhk.in_trading_time(timestamp="2024-09-02 16:10") is False + assert Stockhk.in_trading_time(timestamp="2024-09-02 17:10") is False diff --git a/tests/factors/test_algorithm.py b/tests/factors/test_algorithm.py index 1b16c469..225c8ad9 100644 --- a/tests/factors/test_algorithm.py +++ b/tests/factors/test_algorithm.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -from zvt.factors.algorithm import point_in_range, intersect, intersect_ranges +from zvt.factors.algorithm import point_in_range, intersect, intersect_ranges, combine, distance def test_point_in_range(): @@ -49,3 +49,40 @@ def test_intersect_ranges(): assert intersect_ranges([a, b, c]) == (2, 3) assert intersect_ranges([b, a, c]) == (2, 3) assert intersect_ranges([a, c, b]) == (2, 3) + + +def test_combine(): + a = (1, 2) + b = (1.5, 3) + assert combine(a, b) == (1, 3) + assert combine(b, a) == (1, 3) + + a = (1, 2) + b = (3, 4) + 
assert combine(a, b) == None + assert combine(b, a) == None + + a = (1, 4) + b = (2, 3) + assert combine(a, b) == (1, 4) + assert combine(b, a) == (1, 4) + + +def test_distance(): + a = (1, 2) + b = (1.5, 3) + assert distance(a, b) == (4.5 / 2 - 1.5) / 1.5 + assert distance(b, a) == (1.5 - 4.5 / 2) / 2.25 + + a = (1, 2) + b = (3, 4) + assert distance(a, b) == (3.5 - 1.5) / 1.5 + assert distance(b, a) == (1.5 - 3.5) / 3.5 + + assert distance(a, b, use_max=True) == 3 + assert distance(b, a, use_max=True) == -3 / 4 + + a = (1, 4) + b = (2, 3) + assert distance(a, b) == 0 + assert distance(b, a) == 0 diff --git a/tests/factors/test_factor_select_targets.py b/tests/factors/test_factor_select_targets.py new file mode 100644 index 00000000..a06095df --- /dev/null +++ b/tests/factors/test_factor_select_targets.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +from zvt.contract import IntervalLevel +from zvt.factors.ma.ma_factor import CrossMaFactor + +from zvt.contract.factor import TargetType +from zvt.factors.macd.macd_factor import BullFactor +from ..context import init_test_context + +init_test_context() + + +def test_cross_ma_select_targets(): + entity_ids = ["stock_sz_000338"] + start_timestamp = "2018-01-01" + end_timestamp = "2019-06-30" + factor = CrossMaFactor( + provider="joinquant", + entity_ids=entity_ids, + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + keep_window=10, + windows=[5, 10], + need_persist=False, + level=IntervalLevel.LEVEL_1DAY, + adjust_type="hfq", + ) + assert "stock_sz_000338" in factor.get_targets(timestamp="2018-01-19") + + +def test_bull_select_targets(): + factor = BullFactor( + start_timestamp="2019-01-01", end_timestamp="2019-06-10", level=IntervalLevel.LEVEL_1DAY, provider="joinquant" + ) + + targets = factor.get_targets(timestamp="2019-05-08", target_type=TargetType.positive) + + assert "stock_sz_000338" not in targets + assert "stock_sz_002572" not in targets + + targets = factor.get_targets("2019-05-08", 
target_type=TargetType.negative) + assert "stock_sz_000338" in targets + assert "stock_sz_002572" not in targets + + factor.move_on(timeout=0) + + targets = factor.get_targets(timestamp="2019-06-19", target_type=TargetType.positive) + + assert "stock_sz_000338" in targets + + assert "stock_sz_002572" not in targets diff --git a/tests/factors/test_factors.py b/tests/factors/test_factors.py new file mode 100644 index 00000000..4a7d8790 --- /dev/null +++ b/tests/factors/test_factors.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +from zvt.factors.zen.zen_factor import ZenFactor + + +def test_zen_factor(): + z = ZenFactor( + codes=["000338"], + need_persist=False, + provider="joinquant", + ) + z.draw(show=True) + + z = ZenFactor( + codes=["000338", "601318"], + need_persist=True, + provider="joinquant", + ) + z.draw(show=True) diff --git a/tests/factors/test_selector.py b/tests/factors/test_selector.py deleted file mode 100644 index f7108744..00000000 --- a/tests/factors/test_selector.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- -from zvt.contract import IntervalLevel -from zvt.factors.target_selector import TargetSelector -from zvt.factors.ma.ma_factor import CrossMaFactor -from zvt.factors import BullFactor -from ..context import init_test_context - -init_test_context() - - -class TechnicalSelector(TargetSelector): - def init_factors(self, entity_ids, entity_schema, exchanges, codes, the_timestamp, start_timestamp, - end_timestamp, level): - bull_factor = BullFactor(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, the_timestamp=the_timestamp, start_timestamp=start_timestamp, - end_timestamp=end_timestamp, provider='joinquant', level=level, adjust_type='qfq') - - self.filter_factors = [bull_factor] - - -def test_cross_ma_selector(): - entity_ids = ['stock_sz_000338'] - entity_type = 'stock' - start_timestamp = '2018-01-01' - end_timestamp = '2019-06-30' - my_selector = TargetSelector(entity_ids=entity_ids, - 
entity_schema=entity_type, - start_timestamp=start_timestamp, - end_timestamp=end_timestamp) - # add the factors - my_selector \ - .add_filter_factor(CrossMaFactor(entity_ids=entity_ids, - start_timestamp=start_timestamp, - end_timestamp=end_timestamp, - computing_window=10, - windows=[5, 10], - need_persist=False, - level=IntervalLevel.LEVEL_1DAY, - adjust_type='qfq')) - my_selector.run() - print(my_selector.open_long_df) - print(my_selector.open_short_df) - assert 'stock_sz_000338' in my_selector.get_open_short_targets('2018-01-29') - - -def test_technical_selector(): - selector = TechnicalSelector(start_timestamp='2019-01-01', - end_timestamp='2019-06-10', - level=IntervalLevel.LEVEL_1DAY, - provider='joinquant') - - selector.run() - - print(selector.get_result_df()) - - targets = selector.get_open_long_targets('2019-06-04') - - assert 'stock_sz_000338' not in targets - assert 'stock_sz_000338' not in targets - assert 'stock_sz_002572' not in targets - assert 'stock_sz_002572' not in targets - - targets = selector.get_open_short_targets('2019-06-04') - assert 'stock_sz_000338' in targets - assert 'stock_sz_000338' in targets - assert 'stock_sz_002572' in targets - assert 'stock_sz_002572' in targets - - selector.move_on(timeout=0) - - targets = selector.get_open_long_targets('2019-06-19') - - assert 'stock_sz_000338' in targets - - assert 'stock_sz_002572' not in targets diff --git a/tests/factors/test_technical_factor.py b/tests/factors/test_technical_factor.py index e2bb1e14..f7479156 100644 --- a/tests/factors/test_technical_factor.py +++ b/tests/factors/test_technical_factor.py @@ -10,86 +10,92 @@ def test_ma(): - factor = TechnicalFactor(codes=['000338'], - start_timestamp='2019-01-01', - end_timestamp='2019-06-10', - level=IntervalLevel.LEVEL_1DAY, - provider='joinquant', - computing_window=30, - transformer=MaTransformer(windows=[5, 10, 30]), - adjust_type='qfq') + factor = TechnicalFactor( + provider="joinquant", + codes=["000338"], + 
start_timestamp="2019-01-01", + end_timestamp="2019-06-10", + level=IntervalLevel.LEVEL_1DAY, + keep_window=30, + transformer=MaTransformer(windows=[5, 10, 30]), + adjust_type="qfq", + ) print(factor.factor_df.tail()) # compare with east money manually - ma5 = factor.factor_df['ma5'] - ma10 = factor.factor_df['ma10'] - ma30 = factor.factor_df['ma30'] + ma5 = factor.factor_df["ma5"] + ma10 = factor.factor_df["ma10"] + ma30 = factor.factor_df["ma30"] - assert round(ma5.loc[('stock_sz_000338', '2019-06-10')], 2) <= 11.23 - assert round(ma10.loc[('stock_sz_000338', '2019-06-10')], 2) <= 11.43 - assert round(ma30.loc[('stock_sz_000338', '2019-06-10')], 2) <= 11.52 + assert round(ma5.loc[("stock_sz_000338", "2019-06-10")], 2) <= 11.23 + assert round(ma10.loc[("stock_sz_000338", "2019-06-10")], 2) <= 11.43 + assert round(ma30.loc[("stock_sz_000338", "2019-06-10")], 2) <= 11.52 - factor.move_on(to_timestamp='2019-06-17') - ma5 = factor.factor_df['ma5'] - ma10 = factor.factor_df['ma10'] - ma30 = factor.factor_df['ma30'] + factor.move_on(to_timestamp="2019-06-17") + ma5 = factor.factor_df["ma5"] + ma10 = factor.factor_df["ma10"] + ma30 = factor.factor_df["ma30"] - assert round(ma5.loc[('stock_sz_000338', '2019-06-17')], 2) <= 12.06 - assert round(ma10.loc[('stock_sz_000338', '2019-06-17')], 2) <= 11.64 - assert round(ma30.loc[('stock_sz_000338', '2019-06-17')], 2) <= 11.50 + assert round(ma5.loc[("stock_sz_000338", "2019-06-17")], 2) <= 12.06 + assert round(ma10.loc[("stock_sz_000338", "2019-06-17")], 2) <= 11.64 + assert round(ma30.loc[("stock_sz_000338", "2019-06-17")], 2) <= 11.50 def test_macd(): - factor = TechnicalFactor(codes=['000338'], - start_timestamp='2019-01-01', - end_timestamp='2019-06-10', - level=IntervalLevel.LEVEL_1DAY, - provider='joinquant', - computing_window=None, - transformer=MacdTransformer(), - adjust_type='qfq') + factor = TechnicalFactor( + provider="joinquant", + codes=["000338"], + start_timestamp="2019-01-01", + end_timestamp="2019-06-10", + 
level=IntervalLevel.LEVEL_1DAY, + keep_window=None, + transformer=MacdTransformer(), + adjust_type="qfq", + ) print(factor.factor_df.tail()) # compare with east money manually - diff = factor.factor_df['diff'] - dea = factor.factor_df['dea'] - macd = factor.factor_df['macd'] + diff = factor.factor_df["diff"] + dea = factor.factor_df["dea"] + macd = factor.factor_df["macd"] - assert round(diff.loc[('stock_sz_000338', '2019-06-10')], 2) == -0.14 - assert round(dea.loc[('stock_sz_000338', '2019-06-10')], 2) == -0.15 - assert round(macd.loc[('stock_sz_000338', '2019-06-10')], 2) == 0.02 + assert round(diff.loc[("stock_sz_000338", "2019-06-10")], 2) == -0.14 + assert round(dea.loc[("stock_sz_000338", "2019-06-10")], 2) == -0.15 + assert round(macd.loc[("stock_sz_000338", "2019-06-10")], 2) == 0.02 - factor.move_on(to_timestamp='2019-06-17') - diff = factor.factor_df['diff'] - dea = factor.factor_df['dea'] - macd = factor.factor_df['macd'] + factor.move_on(to_timestamp="2019-06-17") + diff = factor.factor_df["diff"] + dea = factor.factor_df["dea"] + macd = factor.factor_df["macd"] - assert round(diff.loc[('stock_sz_000338', '2019-06-17')], 2) == 0.06 - assert round(dea.loc[('stock_sz_000338', '2019-06-17')], 2) == -0.03 - assert round(macd.loc[('stock_sz_000338', '2019-06-17')], 2) <= 0.19 + assert round(diff.loc[("stock_sz_000338", "2019-06-17")], 2) == 0.06 + assert round(dea.loc[("stock_sz_000338", "2019-06-17")], 2) == -0.03 + assert round(macd.loc[("stock_sz_000338", "2019-06-17")], 2) <= 0.19 def test_cross_ma(): - factor = CrossMaFactor(codes=['000338'], - start_timestamp='2019-01-01', - end_timestamp='2019-06-10', - level=IntervalLevel.LEVEL_1DAY, - provider='joinquant', - windows=[5, 10], - adjust_type='qfq') + factor = CrossMaFactor( + codes=["000338"], + start_timestamp="2019-01-01", + end_timestamp="2019-06-10", + level=IntervalLevel.LEVEL_1DAY, + provider="joinquant", + windows=[5, 10], + adjust_type="qfq", + ) print(factor.factor_df.tail()) 
print(factor.result_df.tail()) - score = factor.result_df['score'] + score = factor.result_df["filter_result"] - assert score[('stock_sz_000338', '2019-06-03')] == True - assert score[('stock_sz_000338', '2019-06-04')] == True - assert ('stock_sz_000338', '2019-06-05') not in score or score[('stock_sz_000338', '2019-06-05')] == False - assert ('stock_sz_000338', '2019-06-06') not in score or score[('stock_sz_000338', '2019-06-06')] == False - assert ('stock_sz_000338', '2019-06-10') not in score or score[('stock_sz_000338', '2019-06-10')] == False + assert score[("stock_sz_000338", "2019-06-03")] == True + assert score[("stock_sz_000338", "2019-06-04")] == True + assert ("stock_sz_000338", "2019-06-05") not in score or score[("stock_sz_000338", "2019-06-05")] == False + assert ("stock_sz_000338", "2019-06-06") not in score or score[("stock_sz_000338", "2019-06-06")] == False + assert ("stock_sz_000338", "2019-06-10") not in score or score[("stock_sz_000338", "2019-06-10")] == False factor.move_on() - score = factor.result_df['score'] - assert score[('stock_sz_000338', '2019-06-17')] == True + score = factor.result_df["filter_result"] + assert score[("stock_sz_000338", "2019-06-17")] == True diff --git a/tests/factors/test_transformers.py b/tests/factors/test_transformers.py index 23c29c10..39ce2bc0 100644 --- a/tests/factors/test_transformers.py +++ b/tests/factors/test_transformers.py @@ -1,11 +1,15 @@ # -*- coding: utf-8 -*- -from zvt.api.quote import get_kdata +from zvt.api.kdata import get_kdata from zvt.factors.algorithm import MaTransformer, MacdTransformer def test_ma_transformer(): - df = get_kdata(entity_id='stock_sz_000338', start_timestamp='2019-01-01', provider='joinquant', - index=['entity_id', 'timestamp']) + df = get_kdata( + entity_id="stock_sz_000338", + start_timestamp="2019-01-01", + provider="joinquant", + index=["entity_id", "timestamp"], + ) t = MaTransformer(windows=[5, 10]) @@ -15,8 +19,12 @@ def test_ma_transformer(): def 
test_MacdTransformer(): - df = get_kdata(entity_id='stock_sz_000338', start_timestamp='2019-01-01', provider='joinquant', - index=['entity_id', 'timestamp']) + df = get_kdata( + entity_id="stock_sz_000338", + start_timestamp="2019-01-01", + provider="joinquant", + index=["entity_id", "timestamp"], + ) t = MacdTransformer() diff --git a/tests/ml/__init__.py b/tests/ml/__init__.py new file mode 100644 index 00000000..40a96afc --- /dev/null +++ b/tests/ml/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/tests/ml/test_sgd.py b/tests/ml/test_sgd.py new file mode 100644 index 00000000..3246a1cd --- /dev/null +++ b/tests/ml/test_sgd.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +from sklearn.linear_model import SGDClassifier, SGDRegressor +from sklearn.pipeline import make_pipeline +from sklearn.preprocessing import StandardScaler + +from zvt.contract import AdjustType +from zvt.ml import MaStockMLMachine + +start_timestamp = "2015-01-01" +end_timestamp = "2019-01-01" +predict_start_timestamp = "2018-06-01" + + +def test_sgd_classification(): + machine = MaStockMLMachine( + data_provider="joinquant", + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + predict_start_timestamp=predict_start_timestamp, + entity_ids=["stock_sz_000001"], + label_method="behavior_cls", + adjust_type=AdjustType.qfq, + ) + clf = make_pipeline(StandardScaler(), SGDClassifier(max_iter=1000, tol=1e-3)) + machine.train(model=clf) + machine.predict() + machine.draw_result(entity_id="stock_sz_000001") + + +def test_sgd_regressor(): + machine = MaStockMLMachine( + data_provider="joinquant", + start_timestamp=start_timestamp, + end_timestamp=end_timestamp, + predict_start_timestamp=predict_start_timestamp, + entity_ids=["stock_sz_000001"], + label_method="raw", + adjust_type=AdjustType.qfq, + ) + reg = make_pipeline(StandardScaler(), SGDRegressor(max_iter=1000, tol=1e-3)) + machine.train(model=reg) + machine.predict() + machine.draw_result(entity_id="stock_sz_000001") diff 
--git a/tests/recorders/common/test_china_stock_list_recorder.py b/tests/recorders/common/test_china_stock_list_recorder.py index e3ccc73d..4611d335 100644 --- a/tests/recorders/common/test_china_stock_list_recorder.py +++ b/tests/recorders/common/test_china_stock_list_recorder.py @@ -3,11 +3,11 @@ init_test_context() -from zvt.recorders.exchange.china_stock_list_spider import ExchangeChinaStockListRecorder +from zvt.recorders.eastmoney import EastmoneyStockRecorder def test_china_stock_recorder(): - recorder = ExchangeChinaStockListRecorder() + recorder = EastmoneyStockRecorder() try: recorder.run() diff --git a/tests/recorders/eastmoney/test_dividend_financing_recorder.py b/tests/recorders/eastmoney/test_dividend_financing_recorder.py index f89c7e1d..cab3fbae 100644 --- a/tests/recorders/eastmoney/test_dividend_financing_recorder.py +++ b/tests/recorders/eastmoney/test_dividend_financing_recorder.py @@ -9,27 +9,27 @@ def test_dividend_detail(): try: - DividendDetail.record_data(provider='eastmoney', codes=SAMPLE_STOCK_CODES) + DividendDetail.record_data(provider="eastmoney", codes=SAMPLE_STOCK_CODES) except: assert False def test_rights_issue_detail(): try: - RightsIssueDetail.record_data(provider='eastmoney', codes=SAMPLE_STOCK_CODES) + RightsIssueDetail.record_data(provider="eastmoney", codes=SAMPLE_STOCK_CODES) except: assert False def test_spo_detail(): try: - SpoDetail.record_data(provider='eastmoney', codes=SAMPLE_STOCK_CODES) + SpoDetail.record_data(provider="eastmoney", codes=SAMPLE_STOCK_CODES) except: assert False def test_dividend_financing(): try: - DividendFinancing.record_data(provider='eastmoney', codes=SAMPLE_STOCK_CODES) + DividendFinancing.record_data(provider="eastmoney", codes=SAMPLE_STOCK_CODES) except: assert False diff --git a/tests/recorders/eastmoney/test_finance.py b/tests/recorders/eastmoney/test_finance.py index 4d052238..9c5440a1 100644 --- a/tests/recorders/eastmoney/test_finance.py +++ b/tests/recorders/eastmoney/test_finance.py @@ 
-5,10 +5,10 @@ from zvt.consts import SAMPLE_STOCK_CODES -from zvt.recorders.eastmoney.finance.china_stock_finance_factor_recorder import ChinaStockFinanceFactorRecorder -from zvt.recorders.eastmoney.finance.china_stock_cash_flow_recorder import ChinaStockCashFlowRecorder -from zvt.recorders.eastmoney.finance.china_stock_balance_sheet_recorder import ChinaStockBalanceSheetRecorder -from zvt.recorders.eastmoney.finance.china_stock_income_statement_recorder import ChinaStockIncomeStatementRecorder +from zvt.recorders.eastmoney.finance.eastmoney_finance_factor_recorder import ChinaStockFinanceFactorRecorder +from zvt.recorders.eastmoney.finance.eastmoney_cash_flow_recorder import ChinaStockCashFlowRecorder +from zvt.recorders.eastmoney.finance.eastmoney_balance_sheet_recorder import ChinaStockBalanceSheetRecorder +from zvt.recorders.eastmoney.finance.eastmoney_income_statement_recorder import ChinaStockIncomeStatementRecorder def test_finance_factor_recorder(): diff --git a/tests/recorders/eastmoney/test_holder_recorder.py b/tests/recorders/eastmoney/test_holder_recorder.py index dd606d11..8a1cf797 100644 --- a/tests/recorders/eastmoney/test_holder_recorder.py +++ b/tests/recorders/eastmoney/test_holder_recorder.py @@ -5,8 +5,8 @@ from zvt.consts import SAMPLE_STOCK_CODES -from zvt.recorders.eastmoney.holder.top_ten_holder_recorder import TopTenHolderRecorder -from zvt.recorders.eastmoney.holder.top_ten_tradable_holder_recorder import TopTenTradableHolderRecorder +from zvt.recorders.eastmoney.holder.eastmoney_top_ten_holder_recorder import TopTenHolderRecorder +from zvt.recorders.eastmoney.holder.eastmoney_top_ten_tradable_holder_recorder import TopTenTradableHolderRecorder def test_top_ten_holder_recorder(): diff --git a/tests/recorders/eastmoney/test_meta_recorder.py b/tests/recorders/eastmoney/test_meta_recorder.py index 92cc0b95..293871d4 100644 --- a/tests/recorders/eastmoney/test_meta_recorder.py +++ b/tests/recorders/eastmoney/test_meta_recorder.py @@ -3,13 
+3,13 @@ init_test_context() -from zvt.recorders.eastmoney.meta.china_stock_meta_recorder import EastmoneyChinaStockDetailRecorder +from zvt.recorders.eastmoney.meta.eastmoney_stock_meta_recorder import EastmoneyStockDetailRecorder from zvt.consts import SAMPLE_STOCK_CODES def test_meta_recorder(): - recorder = EastmoneyChinaStockDetailRecorder(codes=SAMPLE_STOCK_CODES) + recorder = EastmoneyStockDetailRecorder(codes=SAMPLE_STOCK_CODES) try: recorder.run() except: diff --git a/tests/recorders/eastmoney/test_trading_recorder.py b/tests/recorders/eastmoney/test_trading_recorder.py index 20a11b2b..45bc2b54 100644 --- a/tests/recorders/eastmoney/test_trading_recorder.py +++ b/tests/recorders/eastmoney/test_trading_recorder.py @@ -5,8 +5,8 @@ from zvt.consts import SAMPLE_STOCK_CODES -from zvt.recorders.eastmoney.trading.manager_trading_recorder import ManagerTradingRecorder -from zvt.recorders.eastmoney.trading.holder_trading_recorder import HolderTradingRecorder +from zvt.recorders.eastmoney.trading.eastmoney_manager_trading_recorder import ManagerTradingRecorder +from zvt.recorders.eastmoney.trading.eastmoney_holder_trading_recorder import HolderTradingRecorder def test_manager_trading_recorder(): diff --git a/tests/recorders/em/__init__.py b/tests/recorders/em/__init__.py new file mode 100644 index 00000000..40a96afc --- /dev/null +++ b/tests/recorders/em/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/tests/recorders/em/test_em_api.py b/tests/recorders/em/test_em_api.py new file mode 100644 index 00000000..54f84539 --- /dev/null +++ b/tests/recorders/em/test_em_api.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +from zvt.contract import IntervalLevel, AdjustType +from zvt.recorders.em import em_api + +import requests + + +def test_get_kdata(): + # 上证A股 + session = requests.Session() + df = em_api.get_kdata( + session=session, + entity_id="stock_sh_601318", + level=IntervalLevel.LEVEL_1DAY, + adjust_type=AdjustType.qfq, + limit=5, + ) + print(df) + df 
= em_api.get_kdata( + session=session, + entity_id="stock_sh_601318", + level=IntervalLevel.LEVEL_1DAY, + adjust_type=AdjustType.hfq, + limit=5, + ) + print(df) + df = em_api.get_kdata( + session=session, + entity_id="stock_sh_601318", + level=IntervalLevel.LEVEL_1DAY, + adjust_type=AdjustType.bfq, + limit=5, + ) + print(df) + # 深圳A股 + df = em_api.get_kdata( + session=session, + entity_id="stock_sz_000338", + level=IntervalLevel.LEVEL_1DAY, + adjust_type=AdjustType.qfq, + limit=5, + ) + print(df) + df = em_api.get_kdata( + session=session, + entity_id="stock_sz_000338", + level=IntervalLevel.LEVEL_1DAY, + adjust_type=AdjustType.hfq, + limit=5, + ) + print(df) + df = em_api.get_kdata( + session=session, + entity_id="stock_sz_000338", + level=IntervalLevel.LEVEL_1DAY, + adjust_type=AdjustType.bfq, + limit=5, + ) + print(df) diff --git a/tests/recorders/em/test_kdata_recorder.py b/tests/recorders/em/test_kdata_recorder.py new file mode 100644 index 00000000..40a96afc --- /dev/null +++ b/tests/recorders/em/test_kdata_recorder.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/tests/recorders/joinquant/test_quote_recorder.py b/tests/recorders/joinquant/test_quote_recorder.py index eb91f307..cfc61368 100644 --- a/tests/recorders/joinquant/test_quote_recorder.py +++ b/tests/recorders/joinquant/test_quote_recorder.py @@ -10,8 +10,9 @@ def test_1wk_kdata_recorder(): - recorder = JqChinaStockKdataRecorder(codes=SAMPLE_STOCK_CODES, sleeping_time=0, level=IntervalLevel.LEVEL_1WEEK, - real_time=False) + recorder = JqChinaStockKdataRecorder( + codes=SAMPLE_STOCK_CODES, sleeping_time=0, level=IntervalLevel.LEVEL_1WEEK, real_time=False + ) try: recorder.run() except: @@ -19,8 +20,9 @@ def test_1wk_kdata_recorder(): def test_1mon_kdata_recorder(): - recorder = JqChinaStockKdataRecorder(codes=SAMPLE_STOCK_CODES, sleeping_time=0, level=IntervalLevel.LEVEL_1MON, - real_time=False) + recorder = JqChinaStockKdataRecorder( + codes=SAMPLE_STOCK_CODES, sleeping_time=0, 
level=IntervalLevel.LEVEL_1MON, real_time=False + ) try: recorder.run() except: @@ -28,8 +30,9 @@ def test_1mon_kdata_recorder(): def test_1d_kdata_recorder(): - recorder = JqChinaStockKdataRecorder(codes=SAMPLE_STOCK_CODES, sleeping_time=0, level=IntervalLevel.LEVEL_1DAY, - real_time=False) + recorder = JqChinaStockKdataRecorder( + codes=SAMPLE_STOCK_CODES, sleeping_time=0, level=IntervalLevel.LEVEL_1DAY, real_time=False + ) try: recorder.run() except: @@ -37,8 +40,9 @@ def test_1d_kdata_recorder(): def test_1d_hfq_kdata_recorder(): - recorder = JqChinaStockKdataRecorder(codes=['000338'], sleeping_time=0, level=IntervalLevel.LEVEL_1DAY, - real_time=False, adjust_type='hfq') + recorder = JqChinaStockKdataRecorder( + codes=["000338"], sleeping_time=0, level=IntervalLevel.LEVEL_1DAY, real_time=False, adjust_type="hfq" + ) try: recorder.run() except: @@ -46,8 +50,13 @@ def test_1d_hfq_kdata_recorder(): def test_1h_kdata_recorder(): - recorder = JqChinaStockKdataRecorder(codes=['000338'], sleeping_time=0, level=IntervalLevel.LEVEL_1HOUR, - real_time=False, start_timestamp='2019-01-01') + recorder = JqChinaStockKdataRecorder( + codes=["000338"], + sleeping_time=0, + level=IntervalLevel.LEVEL_1HOUR, + real_time=False, + start_timestamp="2019-01-01", + ) try: recorder.run() except: @@ -55,8 +64,9 @@ def test_1h_kdata_recorder(): def test_5m_kdata_recorder(): - recorder = JqChinaStockKdataRecorder(codes=['000338'], sleeping_time=0, level=IntervalLevel.LEVEL_5MIN, - real_time=False, start_timestamp='2019-01-01') + recorder = JqChinaStockKdataRecorder( + codes=["000338"], sleeping_time=0, level=IntervalLevel.LEVEL_5MIN, real_time=False, start_timestamp="2019-01-01" + ) try: recorder.run() except: diff --git a/tests/domain/test_stock_money_flow.py b/tests/recorders/joinquant/test_stock_money_flow.py similarity index 71% rename from tests/domain/test_stock_money_flow.py rename to tests/recorders/joinquant/test_stock_money_flow.py index bff3c5d8..c62d987e 100644 --- 
a/tests/domain/test_stock_money_flow.py +++ b/tests/recorders/joinquant/test_stock_money_flow.py @@ -3,15 +3,16 @@ def test_stock_money_flow(): - provider = 'joinquant' + provider = "joinquant" # Stock.record_data(provider=provider) - StockMoneyFlow.record_data(codes=['300999', '688981'], provider=provider, start_timestamp='2020-12-14', - compute_index_money_flow=False) + StockMoneyFlow.record_data( + codes=["300999", "688981"], provider=provider, start_timestamp="2020-12-14", compute_index_money_flow=False + ) data_samples = [ { - "id": 'stock_sz_300999_2020-12-14', - "timestamp": '2020-12-14', + "id": "stock_sz_300999_2020-12-14", + "timestamp": "2020-12-14", "code": "300999", "net_main_inflows": 46378.96 * 10000, "net_main_inflow_rate": 9.3 / 100, @@ -22,11 +23,11 @@ def test_stock_money_flow(): "net_medium_inflows": -23493.71 * 10000, "net_medium_inflow_rate": -4.71 / 100, "net_small_inflows": -22885.25 * 10000, - "net_small_inflow_rate": -4.59 / 100} - , + "net_small_inflow_rate": -4.59 / 100, + }, { - "id": 'stock_sh_688981_2020-12-14', - "timestamp": '2020-12-14', + "id": "stock_sh_688981_2020-12-14", + "timestamp": "2020-12-14", "code": "688981", "net_main_inflows": -14523.55 * 10000, "net_main_inflow_rate": -10.77 / 100, @@ -37,7 +38,7 @@ def test_stock_money_flow(): "net_medium_inflows": 6945.23 * 10000, "net_medium_inflow_rate": 5.15 / 100, "net_small_inflows": 7578.32 * 10000, - "net_small_inflow_rate": 5.62 / 100 - } + "net_small_inflow_rate": 5.62 / 100, + }, ] StockMoneyFlow.test_data_correctness(provider=provider, data_samples=data_samples) diff --git a/tests/trader/test_trader.py b/tests/trader/test_trader.py index d2e90d16..21a79c76 100644 --- a/tests/trader/test_trader.py +++ b/tests/trader/test_trader.py @@ -1,28 +1,34 @@ # -*- coding: utf-8 -*- -from zvt.api import get_kdata +from zvt.api.kdata import get_kdata from zvt.contract import IntervalLevel, AdjustType from zvt.samples import MyBullTrader, StockTrader -from zvt.utils import 
is_same_date +from zvt.utils.time_utils import is_same_date -buy_timestamp = '2019-05-29' -sell_timestamp = '2020-01-06' +buy_timestamp = "2019-05-29" +sell_timestamp = "2020-01-06" class SingleTrader(StockTrader): def on_time(self, timestamp): if is_same_date(buy_timestamp, timestamp): - self.buy(due_timestamp=buy_timestamp, happen_timestamp=buy_timestamp, entity_ids=['stock_sz_000338']) + self.buy(timestamp=buy_timestamp, entity_ids=["stock_sz_000338"]) if is_same_date(sell_timestamp, timestamp): - self.sell(due_timestamp=sell_timestamp, happen_timestamp=sell_timestamp, entity_ids=['stock_sz_000338']) + self.sell(timestamp=sell_timestamp, entity_ids=["stock_sz_000338"]) def long_position_control(self): return 1 def test_single_trader(): - trader = SingleTrader(codes=['000338'], level=IntervalLevel.LEVEL_1DAY, start_timestamp='2019-01-01', - end_timestamp='2020-01-10', trader_name='000338_single_trader', draw_result=False, - adjust_type=AdjustType.qfq) + trader = SingleTrader( + provider="joinquant", + codes=["000338"], + level=IntervalLevel.LEVEL_1DAY, + start_timestamp="2019-01-01", + end_timestamp="2020-01-10", + trader_name="000338_single_trader", + draw_result=True, + ) trader.run() positions = trader.get_current_account().positions @@ -32,10 +38,20 @@ def test_single_trader(): print(account) - buy_price = get_kdata(entity_id='stock_sz_000338', start_timestamp=buy_timestamp, end_timestamp=buy_timestamp, - return_type='domain')[0] - sell_price = get_kdata(entity_id='stock_sz_000338', start_timestamp=sell_timestamp, - end_timestamp=sell_timestamp, return_type='domain')[0] + buy_price = get_kdata( + provider="joinquant", + entity_id="stock_sz_000338", + start_timestamp=buy_timestamp, + end_timestamp=buy_timestamp, + return_type="domain", + )[0] + sell_price = get_kdata( + provider="joinquant", + entity_id="stock_sz_000338", + start_timestamp=sell_timestamp, + end_timestamp=sell_timestamp, + return_type="domain", + )[0] sell_lost = trader.account_service.slippage 
+ trader.account_service.sell_cost buy_lost = trader.account_service.slippage + trader.account_service.buy_cost @@ -51,14 +67,14 @@ class MultipleTrader(StockTrader): def on_time(self, timestamp): if is_same_date(buy_timestamp, timestamp): - self.buy(due_timestamp=buy_timestamp, happen_timestamp=buy_timestamp, - entity_ids=['stock_sz_000338']) + self.buy(timestamp=timestamp, entity_ids=["stock_sz_000338"]) self.has_buy = True - self.buy(due_timestamp=buy_timestamp, happen_timestamp=buy_timestamp, - entity_ids=['stock_sh_601318']) + self.buy(timestamp=timestamp, entity_ids=["stock_sh_601318"]) if is_same_date(sell_timestamp, timestamp): - self.sell(due_timestamp=sell_timestamp, happen_timestamp=sell_timestamp, - entity_ids=['stock_sz_000338', 'stock_sh_601318']) + self.sell( + timestamp=timestamp, + entity_ids=["stock_sz_000338", "stock_sh_601318"], + ) def long_position_control(self): if self.has_buy: @@ -70,9 +86,16 @@ def long_position_control(self): def test_multiple_trader(): - trader = MultipleTrader(codes=['000338', '601318'], level=IntervalLevel.LEVEL_1DAY, start_timestamp='2019-01-01', - end_timestamp='2020-01-10', trader_name='multiple_trader', draw_result=False, - adjust_type=AdjustType.qfq) + trader = MultipleTrader( + provider="joinquant", + codes=["000338", "601318"], + level=IntervalLevel.LEVEL_1DAY, + start_timestamp="2019-01-01", + end_timestamp="2020-01-10", + trader_name="multiple_trader", + draw_result=False, + adjust_type=AdjustType.qfq, + ) trader.run() positions = trader.get_current_account().positions @@ -83,20 +106,40 @@ def test_multiple_trader(): print(account) # 000338 - buy_price = get_kdata(entity_id='stock_sz_000338', start_timestamp=buy_timestamp, end_timestamp=buy_timestamp, - return_type='domain')[0] - sell_price = get_kdata(entity_id='stock_sz_000338', start_timestamp=sell_timestamp, - end_timestamp=sell_timestamp, return_type='domain')[0] + buy_price = get_kdata( + provider="joinquant", + entity_id="stock_sz_000338", + 
start_timestamp=buy_timestamp, + end_timestamp=buy_timestamp, + return_type="domain", + )[0] + sell_price = get_kdata( + provider="joinquant", + entity_id="stock_sz_000338", + start_timestamp=sell_timestamp, + end_timestamp=sell_timestamp, + return_type="domain", + )[0] sell_lost = trader.account_service.slippage + trader.account_service.sell_cost buy_lost = trader.account_service.slippage + trader.account_service.buy_cost pct1 = (sell_price.close * (1 - sell_lost) - buy_price.close * (1 + buy_lost)) / buy_price.close * (1 + buy_lost) # 601318 - buy_price = get_kdata(entity_id='stock_sh_601318', start_timestamp=buy_timestamp, end_timestamp=buy_timestamp, - return_type='domain')[0] - sell_price = get_kdata(entity_id='stock_sh_601318', start_timestamp=sell_timestamp, - end_timestamp=sell_timestamp, return_type='domain')[0] + buy_price = get_kdata( + provider="joinquant", + entity_id="stock_sh_601318", + start_timestamp=buy_timestamp, + end_timestamp=buy_timestamp, + return_type="domain", + )[0] + sell_price = get_kdata( + provider="joinquant", + entity_id="stock_sh_601318", + start_timestamp=sell_timestamp, + end_timestamp=sell_timestamp, + return_type="domain", + )[0] pct2 = (sell_price.close * (1 - sell_lost) - buy_price.close * (1 + buy_lost)) / buy_price.close * (1 + buy_lost) @@ -107,7 +150,14 @@ def test_multiple_trader(): def test_basic_trader(): try: - MyBullTrader(codes=['000338'], level=IntervalLevel.LEVEL_1DAY, start_timestamp='2018-01-01', - end_timestamp='2019-06-30', trader_name='000338_bull_trader', draw_result=False).run() + MyBullTrader( + provider="joinquant", + codes=["000338"], + level=IntervalLevel.LEVEL_1DAY, + start_timestamp="2018-01-01", + end_timestamp="2019-06-30", + trader_name="000338_bull_trader", + draw_result=False, + ).run() except: assert False diff --git a/tests/utils/test_pd_utils.py b/tests/utils/test_pd_utils.py new file mode 100644 index 00000000..865b2e35 --- /dev/null +++ b/tests/utils/test_pd_utils.py @@ -0,0 +1,21 @@ +# -*- 
coding: utf-8 -*- +import pandas as pd + +from zvt.utils.pd_utils import drop_continue_duplicate + + +def test_drop_continue_duplicate(): + data1 = [1, 2, 2, 3, 4, 4, 5] + s = pd.Series(data=data1) + s1 = drop_continue_duplicate(s=s) + assert s1.tolist() == [1, 2, 3, 4, 5] + + data2 = [1, 2, 2, 2, 4, 4, 5] + + df = pd.DataFrame(data={"A": data1, "B": data2}) + print(df) + df1 = drop_continue_duplicate(s=df, col="A") + assert df1["A"].tolist() == [1, 2, 3, 4, 5] + + df2 = drop_continue_duplicate(s=df, col="B") + assert df2["A"].tolist() == [1, 2, 4, 5] diff --git a/tests/utils/test_time_utils.py b/tests/utils/test_time_utils.py index 402458a1..4f6f2270 100644 --- a/tests/utils/test_time_utils.py +++ b/tests/utils/test_time_utils.py @@ -1,45 +1,71 @@ # -*- coding: utf-8 -*- from zvt.contract import IntervalLevel -from zvt.utils.time_utils import evaluate_size_from_timestamp, next_timestamp, to_pd_timestamp, \ - is_finished_kdata_timestamp, split_time_interval, is_same_date +from zvt.contract.utils import evaluate_size_from_timestamp, next_timestamp_on_level, is_finished_kdata_timestamp +from zvt.utils.time_utils import ( + to_pd_timestamp, + split_time_interval, + is_same_date, + month_start_end_ranges, + count_interval, +) def test_evaluate_size_from_timestamp(): - size = evaluate_size_from_timestamp(start_timestamp='2019-01-01', end_timestamp='2019-01-02', - level=IntervalLevel.LEVEL_1MON, one_day_trading_minutes=4 * 60) + size = evaluate_size_from_timestamp( + start_timestamp="2019-01-01", + end_timestamp="2019-01-02", + level=IntervalLevel.LEVEL_1MON, + one_day_trading_minutes=4 * 60, + ) assert size == 2 - size = evaluate_size_from_timestamp(start_timestamp='2019-01-01', end_timestamp='2019-01-02', - level=IntervalLevel.LEVEL_1WEEK, one_day_trading_minutes=4 * 60) + size = evaluate_size_from_timestamp( + start_timestamp="2019-01-01", + end_timestamp="2019-01-02", + level=IntervalLevel.LEVEL_1WEEK, + one_day_trading_minutes=4 * 60, + ) assert size == 2 - size = 
evaluate_size_from_timestamp(start_timestamp='2019-01-01', end_timestamp='2019-01-02', - level=IntervalLevel.LEVEL_1DAY, one_day_trading_minutes=4 * 60) + size = evaluate_size_from_timestamp( + start_timestamp="2019-01-01", + end_timestamp="2019-01-02", + level=IntervalLevel.LEVEL_1DAY, + one_day_trading_minutes=4 * 60, + ) assert size == 2 - size = evaluate_size_from_timestamp(start_timestamp='2019-01-01', end_timestamp='2019-01-02', - level=IntervalLevel.LEVEL_1HOUR, one_day_trading_minutes=4 * 60) + size = evaluate_size_from_timestamp( + start_timestamp="2019-01-01", + end_timestamp="2019-01-02", + level=IntervalLevel.LEVEL_1HOUR, + one_day_trading_minutes=4 * 60, + ) assert size == 9 - size = evaluate_size_from_timestamp(start_timestamp='2019-01-01', end_timestamp='2019-01-02', - level=IntervalLevel.LEVEL_1MIN, one_day_trading_minutes=4 * 60) + size = evaluate_size_from_timestamp( + start_timestamp="2019-01-01", + end_timestamp="2019-01-02", + level=IntervalLevel.LEVEL_1MIN, + one_day_trading_minutes=4 * 60, + ) assert size == 481 def test_next_timestamp(): - current = '2019-01-10 13:15' - assert next_timestamp(current, level=IntervalLevel.LEVEL_1MIN) == to_pd_timestamp('2019-01-10 13:16') - assert next_timestamp(current, level=IntervalLevel.LEVEL_5MIN) == to_pd_timestamp('2019-01-10 13:20') - assert next_timestamp(current, level=IntervalLevel.LEVEL_15MIN) == to_pd_timestamp('2019-01-10 13:30') + current = "2019-01-10 13:15" + assert next_timestamp_on_level(current, level=IntervalLevel.LEVEL_1MIN) == to_pd_timestamp("2019-01-10 13:16") + assert next_timestamp_on_level(current, level=IntervalLevel.LEVEL_5MIN) == to_pd_timestamp("2019-01-10 13:20") + assert next_timestamp_on_level(current, level=IntervalLevel.LEVEL_15MIN) == to_pd_timestamp("2019-01-10 13:30") def test_is_finished_kdata_timestamp(): - timestamp = '2019-01-10 13:05' + timestamp = "2019-01-10 13:05" assert not is_finished_kdata_timestamp(timestamp, level=IntervalLevel.LEVEL_1DAY) assert not 
is_finished_kdata_timestamp(timestamp, level=IntervalLevel.LEVEL_1HOUR) assert not is_finished_kdata_timestamp(timestamp, level=IntervalLevel.LEVEL_30MIN) @@ -47,15 +73,15 @@ def test_is_finished_kdata_timestamp(): assert is_finished_kdata_timestamp(timestamp, level=IntervalLevel.LEVEL_5MIN) assert is_finished_kdata_timestamp(timestamp, level=IntervalLevel.LEVEL_1MIN) - timestamp = '2019-01-10' + timestamp = "2019-01-10" assert is_finished_kdata_timestamp(timestamp, level=IntervalLevel.LEVEL_1DAY) def test_split_time_interval(): first = None last = None - start = '2020-01-01' - end = '2021-01-01' + start = "2020-01-01" + end = "2021-01-01" for interval in split_time_interval(start, end, interval=30): if first is None: first = interval @@ -65,16 +91,17 @@ def test_split_time_interval(): print(last) assert is_same_date(first[0], start) - assert is_same_date(first[-1], '2020-01-31') + assert is_same_date(first[-1], "2020-01-31") assert is_same_date(last[-1], end) + def test_split_time_interval_month(): first = None last = None - start = '2020-01-01' - end = '2021-01-01' - for interval in split_time_interval(start, end, method='month'): + start = "2020-01-01" + end = "2021-01-01" + for interval in split_time_interval(start, end, method="month"): if first is None: first = interval last = interval @@ -83,7 +110,35 @@ def test_split_time_interval_month(): print(last) assert is_same_date(first[0], start) - assert is_same_date(first[-1], '2020-01-31') + assert is_same_date(first[-1], "2020-01-31") + + assert is_same_date(last[0], "2021-01-01") + assert is_same_date(last[-1], "2021-01-01") + + +def test_month_start_end_range(): + start = "2020-01-01" + end = "2021-01-01" + ranges = month_start_end_ranges(start_date=start, end_date=end) + print(ranges) + assert is_same_date(ranges[0][0], "2020-01-01") + assert is_same_date(ranges[0][1], "2020-01-31") + + assert is_same_date(ranges[-1][0], "2020-12-01") + assert is_same_date(ranges[-1][1], "2020-12-31") + + start = 
"2020-01-01" + end = "2021-01-31" + ranges = month_start_end_ranges(start_date=start, end_date=end) + print(ranges) + assert is_same_date(ranges[0][0], "2020-01-01") + assert is_same_date(ranges[0][1], "2020-01-31") + + assert is_same_date(ranges[-1][0], "2021-01-01") + assert is_same_date(ranges[-1][1], "2021-01-31") + - assert is_same_date(last[0], '2021-01-01') - assert is_same_date(last[-1], '2021-01-01') +def test_count_interval(): + start = "2020-01-01" + end = "2021-01-01" + print(count_interval(start_date=start, end_date=end)) diff --git a/tests/utils/test_utils.py b/tests/utils/test_utils.py index dafe18cf..b5bd370e 100644 --- a/tests/utils/test_utils.py +++ b/tests/utils/test_utils.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- from zvt.contract.api import get_entities -from zvt.utils.utils import iterate_with_step +from zvt.utils.utils import iterate_with_step, to_str, float_to_pct def test_iterate_with_step(): @@ -20,7 +20,7 @@ def test_iterate_with_step(): def test_iterate_entities(): - data = get_entities(entity_type='stock') + data = get_entities(entity_type="stock") first = None last = None for sub_data in iterate_with_step(data): @@ -30,3 +30,19 @@ def test_iterate_entities(): assert len(first) == 100 assert len(last) <= 100 + + +def test_to_str(): + assert to_str(None) is None + assert to_str("") is None + assert to_str("a") == "a" + assert to_str(["a", "b"]) == "a;b" + assert to_str([1, 2]) == "1;2" + + +def test_float_to_pct(): + assert float_to_pct(0.1) == "10.00%" + assert float_to_pct(0.111) == "11.10%" + assert float_to_pct(0.8) == "80.00%" + assert float_to_pct(0.555) == "55.50%" + assert float_to_pct(0.33333) == "33.33%" diff --git a/zvt/__init__.py b/zvt/__init__.py deleted file mode 100644 index b3ad1589..00000000 --- a/zvt/__init__.py +++ /dev/null @@ -1,192 +0,0 @@ -# -*- coding: utf-8 -*- -import importlib -import json -import logging -import os -import pkgutil -import pprint -from logging.handlers import RotatingFileHandler - -import 
pandas as pd -import pkg_resources -from pkg_resources import get_distribution, DistributionNotFound - -from zvt.consts import DATA_SAMPLE_ZIP_PATH, ZVT_TEST_HOME, ZVT_HOME, ZVT_TEST_DATA_PATH, ZVT_TEST_ZIP_DATA_PATH - -try: - dist_name = __name__ - __version__ = get_distribution(dist_name).version -except DistributionNotFound: - __version__ = 'unknown' -finally: - del get_distribution, DistributionNotFound - -logger = logging.getLogger(__name__) - - -def init_log(file_name='zvt.log', log_dir=None, simple_formatter=True): - if not log_dir: - log_dir = zvt_env['log_path'] - - root_logger = logging.getLogger() - - # reset the handlers - root_logger.handlers = [] - - root_logger.setLevel(logging.INFO) - - file_name = os.path.join(log_dir, file_name) - - file_log_handler = RotatingFileHandler(file_name, maxBytes=524288000, backupCount=10) - - file_log_handler.setLevel(logging.INFO) - - console_log_handler = logging.StreamHandler() - console_log_handler.setLevel(logging.INFO) - - # create formatter and add it to the handlers - if simple_formatter: - formatter = logging.Formatter( - "%(asctime)s %(levelname)s %(threadName)s %(message)s") - else: - formatter = logging.Formatter( - "%(asctime)s %(levelname)s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(funcName)s %(message)s") - file_log_handler.setFormatter(formatter) - console_log_handler.setFormatter(formatter) - - # add the handlers to the logger - root_logger.addHandler(file_log_handler) - root_logger.addHandler(console_log_handler) - - -pd.set_option('expand_frame_repr', False) -pd.set_option('mode.chained_assignment', 'raise') -# pd.set_option('display.max_rows', None) -# pd.set_option('display.max_columns', None) - -zvt_env = {} - -zvt_config = {} - -_plugins = {} - - -def init_env(zvt_home: str, **kwargs) -> dict: - """ - init env - - :param zvt_home: home path for zvt - """ - data_path = os.path.join(zvt_home, 'data') - tmp_path = os.path.join(zvt_home, 'tmp') - if not os.path.exists(data_path): - 
os.makedirs(data_path) - - if not os.path.exists(tmp_path): - os.makedirs(tmp_path) - - zvt_env['zvt_home'] = zvt_home - zvt_env['data_path'] = data_path - zvt_env['tmp_path'] = tmp_path - - # path for storing ui results - zvt_env['ui_path'] = os.path.join(zvt_home, 'ui') - if not os.path.exists(zvt_env['ui_path']): - os.makedirs(zvt_env['ui_path']) - - # path for storing logs - zvt_env['log_path'] = os.path.join(zvt_home, 'logs') - if not os.path.exists(zvt_env['log_path']): - os.makedirs(zvt_env['log_path']) - - init_log() - - pprint.pprint(zvt_env) - - # init config - init_config(current_config=zvt_config, **kwargs) - # init plugin - # init_plugins() - - return zvt_env - - -def init_config(pkg_name: str = None, current_config: dict = None, **kwargs) -> dict: - """ - init config - """ - - # create default config.json if not exist - if pkg_name: - config_file = f'{pkg_name}_config.json' - else: - pkg_name = 'zvt' - config_file = 'config.json' - - logger.info(f'init config for {pkg_name}, current_config:{current_config}') - - config_path = os.path.join(zvt_env['zvt_home'], config_file) - if not os.path.exists(config_path): - from shutil import copyfile - try: - sample_config = pkg_resources.resource_filename(pkg_name, 'config.json') - if os.path.exists(sample_config): - copyfile(sample_config, config_path) - except Exception as e: - logger.warning(f'could not load config.json from package {pkg_name}') - - if os.path.exists(config_path): - with open(config_path) as f: - config_json = json.load(f) - for k in config_json: - current_config[k] = config_json[k] - - # set and save the config - for k in kwargs: - current_config[k] = kwargs[k] - with open(config_path, 'w+') as outfile: - json.dump(current_config, outfile) - - pprint.pprint(current_config) - logger.info(f'current_config:{current_config}') - - return current_config - - -def init_plugins(): - for finder, name, ispkg in pkgutil.iter_modules(): - if name.startswith('zvt_'): - try: - _plugins[name] = 
importlib.import_module(name) - except Exception as e: - logger.warning(f'failed to load plugin {name}', e) - logger.info(f'loaded plugins:{_plugins}') - - -if os.getenv('TESTING_ZVT'): - init_env(zvt_home=ZVT_TEST_HOME) - - # init the sample data if need - same = False - if os.path.exists(ZVT_TEST_ZIP_DATA_PATH): - import filecmp - - same = filecmp.cmp(ZVT_TEST_ZIP_DATA_PATH, DATA_SAMPLE_ZIP_PATH) - - if not same: - from shutil import copyfile - from zvt.contract import * - from zvt.utils.zip_utils import unzip - - copyfile(DATA_SAMPLE_ZIP_PATH, ZVT_TEST_ZIP_DATA_PATH) - unzip(ZVT_TEST_ZIP_DATA_PATH, ZVT_TEST_DATA_PATH) - -else: - init_env(zvt_home=ZVT_HOME) - -# register to meta -import zvt.contract as zvt_contract -import zvt.recorders as zvt_recorders -import zvt.factors as zvt_factors - -__all__ = ['zvt_env', 'zvt_config', 'init_log', 'init_env', 'init_config', '__version__'] diff --git a/zvt/api/__init__.py b/zvt/api/__init__.py deleted file mode 100644 index fc84e23d..00000000 --- a/zvt/api/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# -*- coding: utf-8 -*- - -# the __all__ is generated -__all__ = [] - -# __init__.py structure: -# common code of the package -# export interface in __all__ which contains __all__ of its sub modules - -# import all from submodule quote -from .quote import * -from .quote import __all__ as _quote_all -__all__ += _quote_all - -# import all from submodule stats -from .stats import * -from .stats import __all__ as _stats_all -__all__ += _stats_all - -# import all from submodule trader_info_api -from .trader_info_api import * -from .trader_info_api import __all__ as _trader_info_api_all -__all__ += _trader_info_api_all \ No newline at end of file diff --git a/zvt/api/quote.py b/zvt/api/quote.py deleted file mode 100644 index ea57db8c..00000000 --- a/zvt/api/quote.py +++ /dev/null @@ -1,247 +0,0 @@ -# -*- coding: utf-8 -*- - -from typing import Union, List - -import numpy as np -import pandas as pd - -from zvt.contract import 
IntervalLevel, AdjustType, Mixin, PortfolioStockHistory -from zvt.contract.api import decode_entity_id, get_schema_by_name -from zvt.domain import ReportPeriod, EtfStock, Fund, Etf -from zvt.utils.pd_utils import pd_is_not_null -from zvt.utils.time_utils import to_pd_timestamp, now_pd_timestamp, to_time_str, TIME_FORMAT_DAY, TIME_FORMAT_ISO8601 - - -def get_kdata_schema(entity_type: str, - level: Union[IntervalLevel, str] = IntervalLevel.LEVEL_1DAY, - adjust_type: Union[AdjustType, str] = None): - if type(level) == str: - level = IntervalLevel(level) - if type(adjust_type) == str: - adjust_type = AdjustType(adjust_type) - - # kdata schema rule - # 1)name:{entity_type.capitalize()}{IntervalLevel.value.upper()}Kdata - if adjust_type and (adjust_type != AdjustType.qfq): - schema_str = '{}{}{}Kdata'.format(entity_type.capitalize(), level.value.capitalize(), - adjust_type.value.capitalize()) - else: - schema_str = '{}{}Kdata'.format(entity_type.capitalize(), level.value.capitalize()) - return get_schema_by_name(schema_str) - - -def to_report_period_type(report_date): - the_date = to_pd_timestamp(report_date) - if the_date.month == 3 and the_date.day == 31: - return ReportPeriod.season1.value - if the_date.month == 6 and the_date.day == 30: - return ReportPeriod.half_year.value - if the_date.month == 9 and the_date.day == 30: - return ReportPeriod.season3.value - if the_date.month == 12 and the_date.day == 31: - return ReportPeriod.year.value - - return None - - -def get_recent_report_date(the_date=now_pd_timestamp(), step=0): - the_date = to_pd_timestamp(the_date) - assert step >= 0 - if the_date.month >= 10: - recent = "{}{}".format(the_date.year, '-09-30') - elif the_date.month >= 7: - recent = "{}{}".format(the_date.year, '-06-30') - elif the_date.month >= 4: - recent = "{}{}".format(the_date.year, '-03-31') - else: - recent = "{}{}".format(the_date.year - 1, '-12-31') - - if step == 0: - return recent - else: - step = step - 1 - return get_recent_report_date(recent, 
step) - - -def get_recent_report_period(the_date=now_pd_timestamp(), step=0): - return to_report_period_type(get_recent_report_date(the_date, step=step)) - - -def get_exchange(code): - if code >= '333333': - return 'sh' - else: - return 'sz' - - -def china_stock_code_to_id(code): - return "{}_{}_{}".format('stock', get_exchange(code), code) - - -def generate_kdata_id(entity_id, timestamp, level): - if level >= IntervalLevel.LEVEL_1DAY: - return "{}_{}".format(entity_id, to_time_str(timestamp, fmt=TIME_FORMAT_DAY)) - else: - return "{}_{}".format(entity_id, to_time_str(timestamp, fmt=TIME_FORMAT_ISO8601)) - - -def to_jq_report_period(timestamp): - the_date = to_pd_timestamp(timestamp) - report_period = to_report_period_type(timestamp) - if report_period == ReportPeriod.year.value: - return '{}'.format(the_date.year) - if report_period == ReportPeriod.season1.value: - return '{}q1'.format(the_date.year) - if report_period == ReportPeriod.half_year.value: - return '{}q2'.format(the_date.year) - if report_period == ReportPeriod.season3.value: - return '{}q3'.format(the_date.year) - - assert False - - -def to_high_level_kdata(kdata_df: pd.DataFrame, to_level: IntervalLevel): - def to_close(s): - if pd_is_not_null(s): - return s[-1] - - def to_open(s): - if pd_is_not_null(s): - return s[0] - - def to_high(s): - return np.max(s) - - def to_low(s): - return np.min(s) - - def to_sum(s): - return np.sum(s) - - original_level = kdata_df['level'][0] - entity_id = kdata_df['entity_id'][0] - provider = kdata_df['provider'][0] - name = kdata_df['name'][0] - code = kdata_df['code'][0] - - entity_type, _, _ = decode_entity_id(entity_id=entity_id) - - assert IntervalLevel(original_level) <= IntervalLevel.LEVEL_1DAY - assert IntervalLevel(original_level) < IntervalLevel(to_level) - - df: pd.DataFrame = None - if to_level == IntervalLevel.LEVEL_1WEEK: - # loffset='-2' 用周五作为时间标签 - if entity_type == 'stock': - df = kdata_df.resample('W', loffset=pd.DateOffset(days=-2)).apply({'close': 
to_close, - 'open': to_open, - 'high': to_high, - 'low': to_low, - 'volume': to_sum, - 'turnover': to_sum}) - else: - df = kdata_df.resample('W', loffset=pd.DateOffset(days=-2)).apply({'close': to_close, - 'open': to_open, - 'high': to_high, - 'low': to_low, - 'volume': to_sum, - 'turnover': to_sum}) - df = df.dropna() - # id entity_id timestamp provider code name level - df['entity_id'] = entity_id - df['provider'] = provider - df['code'] = code - df['name'] = name - - return df - - -def portfolio_relate_stock(df, portfolio): - df['entity_id'] = portfolio.entity_id - df['entity_type'] = portfolio.entity_type - df['exchange'] = portfolio.exchange - df['code'] = portfolio.code - df['name'] = portfolio.name - - return df - - -# 季报只有前十大持仓,半年报和年报才有全量的持仓信息,故根据离timestamp最近的报表(年报 or 半年报)来确定持仓 -def get_portfolio_stocks(portfolio_entity=Fund, code=None, codes=None, ids=None, timestamp=now_pd_timestamp(), - provider=None): - portfolio_stock = f'{portfolio_entity.__name__}Stock' - data_schema: PortfolioStockHistory = get_schema_by_name(portfolio_stock) - latests: List[PortfolioStockHistory] = data_schema.query_data(provider=provider, code=code, end_timestamp=timestamp, - order=data_schema.timestamp.desc(), limit=1, - return_type='domain') - if latests: - latest_record = latests[0] - # 获取最新的报表 - df = data_schema.query_data(provider=provider, code=code, codes=codes, ids=ids, end_timestamp=timestamp, - filters=[data_schema.report_date == latest_record.report_date]) - # 最新的为年报或者半年报 - if latest_record.report_period == ReportPeriod.year or latest_record.report_period == ReportPeriod.half_year: - return df - # 季报,需要结合 年报或半年报 来算持仓 - else: - step = 0 - while step <= 20: - report_date = get_recent_report_date(latest_record.report_date, step=step) - - pre_df = data_schema.query_data(provider=provider, code=code, codes=codes, ids=ids, - end_timestamp=timestamp, - filters=[data_schema.report_date == to_pd_timestamp(report_date)]) - df = df.append(pre_df) - - # 半年报和年报 - if 
(ReportPeriod.half_year.value in pre_df['report_period'].tolist()) or ( - ReportPeriod.year.value in pre_df['report_period'].tolist()): - # 保留最新的持仓 - df = df.drop_duplicates(subset=['stock_code'], keep='first') - return df - step = step + 1 - - -def get_etf_stocks(code=None, codes=None, ids=None, timestamp=now_pd_timestamp(), provider=None): - return get_portfolio_stocks(portfolio_entity=Etf, code=code, codes=codes, ids=ids, timestamp=timestamp, - provider=provider) - - -def get_fund_stocks(code=None, codes=None, ids=None, timestamp=now_pd_timestamp(), provider=None): - return get_portfolio_stocks(portfolio_entity=Fund, code=code, codes=codes, ids=ids, timestamp=timestamp, - provider=provider) - - -def get_kdata(entity_id=None, entity_ids=None, level=IntervalLevel.LEVEL_1DAY.value, provider=None, columns=None, - return_type='df', start_timestamp=None, end_timestamp=None, - filters=None, session=None, order=None, limit=None, index='timestamp', adjust_type: AdjustType = None): - assert not entity_id or not entity_ids - if entity_ids: - entity_id = entity_ids[0] - else: - entity_ids = [entity_id] - - entity_type, exchange, code = decode_entity_id(entity_id) - data_schema: Mixin = get_kdata_schema(entity_type, level=level, adjust_type=adjust_type) - - return data_schema.query_data(entity_ids=entity_ids, level=level, provider=provider, - columns=columns, return_type=return_type, start_timestamp=start_timestamp, - end_timestamp=end_timestamp, filters=filters, session=session, order=order, - limit=limit, - index=index) - - -if __name__ == '__main__': - df = get_etf_stocks(code='510050', provider='joinquant') - print(df) - - # assert get_kdata_schema(entity_type='stock', level=IntervalLevel.LEVEL_1DAY) == Stock1dKdata - # assert get_kdata_schema(entity_type='stock', level=IntervalLevel.LEVEL_15MIN) == Stock15mKdata - # assert get_kdata_schema(entity_type='stock', level=IntervalLevel.LEVEL_1HOUR) == Stock1hKdata - # - # assert get_kdata_schema(entity_type='coin', 
level=IntervalLevel.LEVEL_1DAY) == Coin1dKdata - # assert get_kdata_schema(entity_type='coin', level=IntervalLevel.LEVEL_1MIN) == Coin1mKdata -# the __all__ is generated -__all__ = ['get_kdata_schema', 'to_report_period_type', 'get_recent_report_date', 'get_recent_report_period', - 'get_exchange', 'china_stock_code_to_id', 'generate_kdata_id', 'to_jq_report_period', 'to_high_level_kdata', - 'portfolio_relate_stock', 'get_etf_stocks', 'get_kdata', 'get_portfolio_stocks'] diff --git a/zvt/api/stats.py b/zvt/api/stats.py deleted file mode 100644 index eec50e23..00000000 --- a/zvt/api/stats.py +++ /dev/null @@ -1,199 +0,0 @@ -# -*- coding: utf-8 -*- -import enum -import itertools -from typing import Union - -import pandas as pd - -from zvt.api import get_kdata_schema, get_recent_report_date -from zvt.contract import Mixin, AdjustType -from zvt.contract.api import decode_entity_id -from zvt.domain import FundStock, StockValuation -from zvt.utils import now_pd_timestamp, next_date, pd_is_not_null - - -class WindowMethod(enum.Enum): - change = 'change' - avg = 'avg' - sum = 'sum' - - -class TopType(enum.Enum): - positive = 'positive' - negative = 'negative' - - -def get_top_performance_entities(entity_type='stock', start_timestamp=None, end_timestamp=None, pct=0.1, - return_type=None, adjust_type: Union[AdjustType, str] = None, filters=None, - show_name=False): - if not adjust_type and entity_type == 'stock': - adjust_type = AdjustType.hfq - data_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type) - - return get_top_entities(data_schema=data_schema, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - column='close', pct=pct, method=WindowMethod.change, return_type=return_type, - filters=filters, show_name=show_name) - - -def get_top_fund_holding_stocks(timestamp=None, pct=0.3, by=None): - if not timestamp: - timestamp = now_pd_timestamp() - # 季报一般在report_date后1个月内公布,年报2个月内,年报4个月内 - # 所以取时间点的最近的两个公布点,保证取到数据 - # 
所以,这是个滞后的数据,只是为了看个大概,毕竟模糊的正确better than 精确的错误 - report_date = get_recent_report_date(timestamp, 1) - fund_cap_df = FundStock.query_data(filters=[FundStock.report_date >= report_date, FundStock.timestamp <= timestamp], - columns=['stock_id', 'market_cap']) - fund_cap_df = fund_cap_df.groupby('stock_id')['market_cap'].sum().sort_values(ascending=False) - - # 直接根据持有市值返回 - if not by: - s = fund_cap_df.iloc[:int(len(fund_cap_df) * pct)] - - return s.to_frame() - - # 按流通盘比例 - if by == 'trading': - columns = ['entity_id', 'circulating_market_cap'] - # 按市值比例 - elif by == 'all': - columns = ['entity_id', 'market_cap'] - - entity_ids = fund_cap_df.index.tolist() - start_timestamp = next_date(timestamp, -30) - cap_df = StockValuation.query_data(entity_ids=entity_ids, - filters=[StockValuation.timestamp >= start_timestamp, - StockValuation.timestamp <= timestamp], - columns=columns) - if by == 'trading': - cap_df = cap_df.rename(columns={'circulating_market_cap': 'cap'}) - elif by == 'all': - cap_df = cap_df.rename(columns={'market_cap': 'cap'}) - - cap_df = cap_df.groupby('entity_id').mean() - result_df = pd.concat([cap_df, fund_cap_df], axis=1, join='inner') - result_df['pct'] = result_df['market_cap'] / result_df['cap'] - - pct_df = result_df['pct'].sort_values(ascending=False) - - s = pct_df.iloc[:int(len(pct_df) * pct)] - - return s.to_frame() - - -def get_performance(entity_ids, start_timestamp=None, end_timestamp=None, adjust_type: Union[AdjustType, str] = None): - entity_type, _, _ = decode_entity_id(entity_ids[0]) - if not adjust_type and entity_type == 'stock': - adjust_type = AdjustType.hfq - data_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type) - - result, _ = get_top_entities(data_schema=data_schema, column='close', start_timestamp=start_timestamp, - end_timestamp=end_timestamp, pct=1, method=WindowMethod.change, - return_type=TopType.positive, filters=[data_schema.entity_id.in_(entity_ids)]) - return result - - -def 
get_top_volume_entities(entity_type='stock', entity_ids=None, start_timestamp=None, end_timestamp=None, pct=0.1, - return_type=TopType.positive, adjust_type: Union[AdjustType, str] = None, - method=WindowMethod.avg): - if not adjust_type and entity_type == 'stock': - adjust_type = AdjustType.hfq - data_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type) - - filters = None - if entity_ids: - filters = [data_schema.entity_id.in_(entity_ids)] - - result, _ = get_top_entities(data_schema=data_schema, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - column='turnover', pct=pct, method=method, return_type=return_type, filters=filters) - return result - - -def get_top_entities(data_schema: Mixin, column: str, start_timestamp=None, end_timestamp=None, pct=0.1, - method: WindowMethod = WindowMethod.change, return_type: TopType = None, filters=None, - show_name=False): - """ - get top entities in specific domain between time range - - :param data_schema: schema in domain - :param column: schema column - :param start_timestamp: - :param end_timestamp: - :param pct: range (0,1] - :param method: - :param return_type: - :param filters: - :param show_name: show entity name - :return: - """ - if type(method) == str: - method = WindowMethod(method) - - if type(return_type) == str: - return_type = TopType(return_type) - - if show_name: - columns = ['entity_id', column, 'name'] - else: - columns = ['entity_id', column] - - all_df = data_schema.query_data(start_timestamp=start_timestamp, end_timestamp=end_timestamp, - columns=columns, filters=filters) - g = all_df.groupby('entity_id') - tops = {} - names = {} - for entity_id, df in g: - if method == WindowMethod.change: - start = df[column].iloc[0] - end = df[column].iloc[-1] - change = (end - start) / start - tops[entity_id] = change - elif method == WindowMethod.avg: - tops[entity_id] = df[column].mean() - elif method == WindowMethod.sum: - tops[entity_id] = df[column].sum() - - if show_name: - 
names[entity_id] = df['name'].iloc[0] - - positive_df = None - negative_df = None - top_index = int(len(tops) * pct) - if return_type is None or return_type == TopType.positive: - # from big to small - positive_tops = {k: v for k, v in sorted(tops.items(), key=lambda item: item[1], reverse=True)} - positive_tops = dict(itertools.islice(positive_tops.items(), top_index)) - positive_df = pd.DataFrame.from_dict(positive_tops, orient='index') - - col = 'score' - positive_df.columns = [col] - positive_df.sort_values(by=col, ascending=False) - if return_type is None or return_type == TopType.negative: - # from small to big - negative_tops = {k: v for k, v in sorted(tops.items(), key=lambda item: item[1])} - negative_tops = dict(itertools.islice(negative_tops.items(), top_index)) - negative_df = pd.DataFrame.from_dict(negative_tops, orient='index') - - col = 'score' - negative_df.columns = [col] - negative_df.sort_values(by=col) - - if names: - if pd_is_not_null(positive_df): - positive_df['name'] = positive_df.index.map(lambda x: names[x]) - if pd_is_not_null(negative_df): - negative_df['name'] = negative_df.index.map(lambda x: names[x]) - return positive_df, negative_df - - -if __name__ == '__main__': - from pprint import pprint - - # tops1, tops2 = get_top_performance_entities(start_timestamp='2020-01-01') - # - # pprint(tops1) - # pprint(tops2) - df = get_top_fund_holding_stocks() - pprint(df) -# the __all__ is generated -__all__ = ['get_top_performance_entities', 'get_performance', 'get_top_volume_entities', 'get_top_entities'] diff --git a/zvt/api/trader_info_api.py b/zvt/api/trader_info_api.py deleted file mode 100644 index 3eb607a3..00000000 --- a/zvt/api/trader_info_api.py +++ /dev/null @@ -1,106 +0,0 @@ -# -*- coding: utf-8 -*- -from typing import List, Union - -import pandas as pd - -from zvt.contract import IntervalLevel -from zvt.contract.api import get_data, get_db_session -from zvt.contract.normal_data import NormalData -from zvt.contract.reader import 
DataReader -from zvt.domain import AccountStats, Order, trader_info, TraderInfo, Position -from zvt.contract.drawer import Drawer - - -def clear_trader(trader_name, session=None): - if not session: - session = get_db_session('zvt', data_schema=TraderInfo) - session.query(TraderInfo).filter(TraderInfo.trader_name == trader_name).delete() - session.query(AccountStats).filter(AccountStats.trader_name == trader_name).delete() - session.query(Position).filter(Position.trader_name == trader_name).delete() - session.query(Order).filter(Order.trader_name == trader_name).delete() - session.commit() - -def get_trader_info(trader_name=None, return_type='df', start_timestamp=None, end_timestamp=None, - filters=None, session=None, order=None, limit=None) -> List[trader_info.TraderInfo]: - if trader_name: - if filters: - filters = filters + [trader_info.TraderInfo.trader_name == trader_name] - else: - filters = [trader_info.TraderInfo.trader_name == trader_name] - - return get_data(data_schema=trader_info.TraderInfo, entity_id=None, codes=None, level=None, provider='zvt', - columns=None, return_type=return_type, start_timestamp=start_timestamp, - end_timestamp=end_timestamp, filters=filters, session=session, order=order, limit=limit) - - -def get_order_securities(trader_name): - items = get_db_session(provider='zvt', data_schema=Order).query(Order.entity_id).filter( - Order.trader_name == trader_name).group_by(Order.entity_id).all() - - return [item[0] for item in items] - - -class AccountStatsReader(DataReader): - - def __init__(self, - the_timestamp: Union[str, pd.Timestamp] = None, - start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = None, - columns: List = None, - filters: List = None, - order: object = None, - level: IntervalLevel = IntervalLevel.LEVEL_1DAY, - trader_names: List[str] = None) -> None: - self.trader_names = trader_names - - self.filters = filters - - if self.trader_names: - filter = [AccountStats.trader_name == name 
for name in self.trader_names] - if self.filters: - self.filters += filter - else: - self.filters = filter - super().__init__(AccountStats, None, None, None, None, None, None, - the_timestamp, start_timestamp, end_timestamp, columns, self.filters, order, None, level, - category_field='trader_name', time_field='timestamp', computing_window=None) - - def draw_line(self, show=True): - drawer = Drawer(main_data=NormalData(self.data_df.copy()[['trader_name', 'timestamp', 'all_value']], - category_field='trader_name')) - return drawer.draw_line(show=show) - - -class OrderReader(DataReader): - def __init__(self, - the_timestamp: Union[str, pd.Timestamp] = None, - start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = None, - columns: List = None, - filters: List = None, - order: object = None, - level: IntervalLevel = None, - trader_names: List[str] = None) -> None: - self.trader_names = trader_names - - self.filters = filters - - if self.trader_names: - filter = [Order.trader_name == name for name in self.trader_names] - if self.filters: - self.filters += filter - else: - self.filters = filter - - super().__init__(Order, None, None, None, None, None, None, - the_timestamp, start_timestamp, end_timestamp, columns, self.filters, order, None, level, - category_field='trader_name', time_field='timestamp', computing_window=None) - - -if __name__ == '__main__': - reader = AccountStatsReader(trader_names=['000338_ma_trader']) - drawer = Drawer(main_data=NormalData(reader.data_df.copy()[['trader_name', 'timestamp', 'all_value']], - category_field='trader_name')) - drawer.draw_line() -# the __all__ is generated -__all__ = ['get_trader_info', 'get_order_securities', 'AccountStatsReader', 'OrderReader'] \ No newline at end of file diff --git a/zvt/autocode/generator.py b/zvt/autocode/generator.py deleted file mode 100644 index d4ca497f..00000000 --- a/zvt/autocode/generator.py +++ /dev/null @@ -1,242 +0,0 @@ -# -*- coding: utf-8 -*- -import os 
-from typing import List - -from zvt.contract import IntervalLevel, AdjustType -from zvt.autocode.templates import all_tpls -from zvt.utils import now_pd_timestamp -from zvt.utils.file_utils import list_all_files -from zvt.utils.git_utils import get_git_user_name, get_git_user_email - - -def all_sub_modules(dir_path: str): - """ - list all module name in specific directory - - :param dir_path: - :return: - """ - modules = [] - for entry in os.scandir(dir_path): - if entry.is_dir() or ( - entry.path.endswith('.py') and not entry.path.endswith('__init__.py')): - module_name = os.path.splitext(os.path.basename(entry.path))[0] - # ignore hidden - if module_name.startswith('.') or not module_name[0].isalpha(): - continue - modules.append(module_name) - return modules - - -def _remove_start_end(line: str, start='class ', end='('): - if line.startswith(start) and (end in line): - start_index = len(start) - end_index = line.index(end) - return line[start_index:end_index] - if not start and (end in line): - end_index = line.index(end) - return line[:end_index] - - -def _get_interface_name(line: str): - """ - get interface name of the line - - :param line: the line of the source - :return: - """ - if line.startswith("class "): - return _remove_start_end(line, "class ", "(") - elif line.startswith("def "): - return _remove_start_end(line, "def ", "(") - - -def all_sub_all(sub_module): - return ''' - -# import all from submodule {0} -from .{0} import * -from .{0} import __all__ as _{0}_all -__all__ += _{0}_all'''.format(sub_module) - - -def fill_package_if_not_exist(dir_path: str): - fill_package(dir_path) - for entry in os.scandir(dir_path): - if entry.is_dir(): - fill_package(entry.path) - fill_package_if_not_exist(entry.path) - elif entry.is_file(): - pass - - -def fill_package(dir_path: str): - base_name = os.path.basename(dir_path) - if base_name[0].isalpha(): - if os.path.isdir(dir_path): - pkg_file = os.path.join(dir_path, '__init__.py') - if not 
os.path.exists(pkg_file): - package_template = '# -*- coding: utf-8 -*-\n' - with open(pkg_file, 'w') as outfile: - outfile.write(package_template) - - -def gen_exports(dir_path='./domain', gen_flag='# the __all__ is generated'): - fill_package_if_not_exist(dir_path=dir_path) - files = list_all_files(dir_path=dir_path) - for file in files: - exports = [] - lines = [] - # read and generate __all__ - with open(file) as fp: - line = fp.readline() - while line: - if line.startswith(gen_flag): - break - lines.append(line) - export = _get_interface_name(line) - if export and export[0].isalpha(): - exports.append(export) - line = fp.readline() - print(f'{file}:{exports}') - lines.append(gen_flag) - lines.append(f'\n__all__ = {exports}') - - # the package module - basename = os.path.basename(file) - if basename == '__init__.py': - dir_path = os.path.dirname(file) - modules = all_sub_modules(dir_path) - if modules: - lines.append(''' - -# __init__.py structure: -# common code of the package -# export interface in __all__ which contains __all__ of its sub modules''') - for mod in modules: - lines.append(all_sub_all(mod)) - - # write with __all__ - with open(file, mode='w') as fp: - fp.writelines(lines) - - -# kdata schema rule -# 1)name:{entity_type}{level}Kdata -# 2)one db file for one schema - -def gen_kdata_schema(pkg: str, - providers: List[str], - entity_type: str, - levels: List[IntervalLevel], - adjust_types: List[AdjustType] = [None], - entity_in_submodule: bool = False, - kdata_module='quotes'): - tables = [] - - base_path = './domain' - - if kdata_module: - base_path = os.path.join(base_path, kdata_module) - if entity_in_submodule: - base_path = os.path.join(base_path, entity_type) - - for level in levels: - - for adjust_type in adjust_types: - level = IntervalLevel(level) - - cap_entity_type = entity_type.capitalize() - cap_level = level.value.capitalize() - - # you should define {EntityType}KdataCommon in kdata_module at first - kdata_common = 
f'{cap_entity_type}KdataCommon' - - if adjust_type and (adjust_type != AdjustType.qfq): - class_name = f'{cap_entity_type}{cap_level}{adjust_type.value.capitalize()}Kdata' - table_name = f'{entity_type}_{level.value}_{adjust_type.value.lower()}_kdata' - - else: - class_name = f'{cap_entity_type}{cap_level}Kdata' - table_name = f'{entity_type}_{level.value}_kdata' - - tables.append(table_name) - - schema_template = f'''# -*- coding: utf-8 -*- -# this file is generated by gen_kdata_schema function, dont't change it -from sqlalchemy.ext.declarative import declarative_base - -from zvt.contract.register import register_schema -from {pkg}.domain.{kdata_module} import {kdata_common} - -KdataBase = declarative_base() - - -class {class_name}(KdataBase, {kdata_common}): - __tablename__ = '{table_name}' - - -register_schema(providers={providers}, db_name='{table_name}', schema_base=KdataBase, entity_type='{entity_type}') - -''' - # generate the schema - with open(os.path.join(base_path, f'{table_name}.py'), 'w') as outfile: - outfile.write(schema_template) - - # generate the package - pkg_file = os.path.join(base_path, '__init__.py') - if not os.path.exists(pkg_file): - package_template = '''# -*- coding: utf-8 -*- -''' - with open(pkg_file, 'w') as outfile: - outfile.write(package_template) - - # generate exports - gen_exports('./domain') - - -def gen_plugin_project(entity_type, prefix: str = 'zvt', dir_path: str = '.', providers=['joinquant']): - """ - generate a standard plugin project - - :param entity_type: the entity type of the plugin project - :param prefix: project prefix - :param dir_path: the root path for the project - :param providers: the supported providers - """ - - # generate project files - project = f'{prefix}_{entity_type}' - entity_class = entity_type.capitalize() - project_path = os.path.join(dir_path, project) - if not os.path.exists(project_path): - os.makedirs(project_path) - - current_time = now_pd_timestamp() - user_name = get_git_user_name() - 
user_email = get_git_user_email() - - for tpl in all_tpls(project=project, entity_type=entity_type): - file_name = tpl[0] - tpl_content = tpl[1].safe_substitute(project=project, - entity_type=entity_type, - entity_class=entity_class, - providers=providers, - provider=providers[0], - Provider=providers[0].capitalize(), - year=current_time.year, - user=user_name, - email=user_email) - file_path = os.path.join(project_path, file_name) - - file_dir = os.path.dirname(file_path) - if not os.path.exists(file_dir): - os.makedirs(file_dir) - - with open(file_path, "w", encoding="utf-8") as fh: - fh.write(tpl_content) - - -# the __all__ is generated -__all__ = ['all_sub_modules', 'all_sub_all', 'fill_package_if_not_exist', 'fill_package', 'gen_exports', - 'gen_kdata_schema', 'gen_plugin_project'] diff --git a/zvt/autocode/templates/__init__.py b/zvt/autocode/templates/__init__.py deleted file mode 100644 index 777e3e68..00000000 --- a/zvt/autocode/templates/__init__.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -import os -import string - -from pkg_resources import resource_string - -from zvt.utils.file_utils import list_all_files - - -def all_tpls(project: str, entity_type: str): - """ - return list of templates(location,Template) - - :param project: - :return: - """ - tpl_dir = os.path.join(os.path.dirname(__file__)) - tpl_files = list_all_files(tpl_dir, ext='template', return_base_name=True) - tpls = [] - for tpl in tpl_files: - data = resource_string(__name__, tpl) - file_location = os.path.splitext(os.path.basename(tpl))[0] - # we assure that line endings are converted to '\n' for all OS - data = data.decode(encoding="utf-8").replace(os.linesep, "\n") - - # change path for specific file - # domain - if file_location == 'kdata_common.py': - file_location = f'{project}/domain/quotes/__init__.py' - elif file_location == 'meta.py': - file_location = f'{project}/domain/{entity_type}_meta.py' - # recorder - elif file_location == 'kdata_recorder.py': - 
file_location = f'{project}/recorders/{entity_type}_kdata_recorder.py' - elif file_location == 'meta_recorder.py': - file_location = f'{project}/recorders/{entity_type}_meta_recorder.py' - # fill script - elif file_location == 'fill_project.py': - file_location = f'{project}/fill_project.py' - # tests - elif file_location == 'test_pass.py': - file_location = f'tests/test_pass.py' - elif file_location == 'pkg_init.py': - file_location = f'{project}/__init__.py' - - tpls.append((file_location, string.Template(data))) - return tpls - - -# the __all__ is generated -__all__ = ['all_tpls'] diff --git a/zvt/autocode/templates/setup.py.template b/zvt/autocode/templates/setup.py.template deleted file mode 100644 index a572ccfa..00000000 --- a/zvt/autocode/templates/setup.py.template +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup, find_packages - -try: - # for pip >= 10 - from pip._internal.req import parse_requirements -except ImportError: - # for pip <= 9.0.3 - from pip.req import parse_requirements - -here = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(here, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# Arguments marked as "Required" below must be included for upload to PyPI. -# Fields marked as "Optional" may be commented out. 
- -install_reqs = parse_requirements("requirements.txt", session=False) - -try: - requirements = [str(ir.req) for ir in install_reqs] -except: - requirements = [str(ir.requirement) for ir in install_reqs] - -setup( - name='${project}', - version='0.0.1', - description='unified,modular quant framework for human beings ', - long_description=long_description, - url='https://github.com/zvtvz/${project}', - author='${user}', - author_email='${email}', - classifiers=[ # Optional - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: Customer Service', - 'Intended Audience :: Education', - 'Intended Audience :: Financial and Insurance Industry', - 'Topic :: Software Development :: Build Tools', - 'Topic :: Office/Business :: Financial :: Investment', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7' - ], - keywords='quant stock finance fintech big-data zvt ma-analysis trading-platform pandas fundamental-analysis', - packages=find_packages(include=['${project}.*', '${project}']), - python_requires='>=3.5, <4', - include_package_data=True, - install_requires=requirements, - project_urls={ - 'Bug Reports': 'https://github.com/zvtvz/${project}/issues', - 'Funding': 'https://github.com/zvtvz/${project}', - 'Say Thanks!': 'https://saythanks.io/to/foolcage', - 'Source': 'https://github.com/zvtvz/${project}', - }, - long_description_content_type="text/markdown" -) diff --git a/zvt/consts.py b/zvt/consts.py deleted file mode 100644 index 3b4f3656..00000000 --- a/zvt/consts.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -import os -from pathlib import Path - -# zvt home dir -ZVT_HOME = os.environ.get('ZVT_HOME') -if not ZVT_HOME: - ZVT_HOME = os.path.abspath(os.path.join(Path.home(), 'zvt-home')) - -# data for testing -ZVT_TEST_HOME = os.path.abspath(os.path.join(Path.home(), 'zvt-test-home')) -ZVT_TEST_ZIP_DATA_PATH = 
os.path.join(ZVT_TEST_HOME, 'data.zip') -ZVT_TEST_DATA_PATH = os.path.join(ZVT_TEST_HOME, 'data') - -DATA_SAMPLE_ZIP_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), 'samples', 'data.zip')) - -# ****** setting for stocks ****** # -# 覆盖维度 银行/保险/企业/券商 创业板 中小板 主板 -SAMPLE_STOCK_CODES = ['000001', '000783', '000778', '603220', '601318', '000338', '002572', '300027'] - -# 沪深300,证券,中证500,上证50,创业板,军工,传媒,资源 -SAMPLE_ETF_CODES = ['510300', '512880', '510500', '510050', '159915', '512660', '512980', '510410'] diff --git a/zvt/contract/__init__.py b/zvt/contract/__init__.py deleted file mode 100644 index f63238ab..00000000 --- a/zvt/contract/__init__.py +++ /dev/null @@ -1,114 +0,0 @@ -# -*- coding: utf-8 -*- -import enum - - -class IntervalLevel(enum.Enum): - LEVEL_TICK = 'tick' - LEVEL_1MIN = '1m' - LEVEL_5MIN = '5m' - LEVEL_15MIN = '15m' - LEVEL_30MIN = '30m' - LEVEL_1HOUR = '1h' - LEVEL_4HOUR = '4h' - LEVEL_1DAY = '1d' - LEVEL_1WEEK = '1wk' - LEVEL_1MON = '1mon' - - def to_pd_freq(self): - if self == IntervalLevel.LEVEL_1MIN: - return '1min' - if self == IntervalLevel.LEVEL_5MIN: - return '5min' - if self == IntervalLevel.LEVEL_15MIN: - return '15min' - if self == IntervalLevel.LEVEL_30MIN: - return '30min' - if self == IntervalLevel.LEVEL_1HOUR: - return '1H' - if self == IntervalLevel.LEVEL_4HOUR: - return '4H' - if self >= IntervalLevel.LEVEL_1DAY: - return '1D' - - def floor_timestamp(self, pd_timestamp): - if self == IntervalLevel.LEVEL_1MIN: - return pd_timestamp.floor('1min') - if self == IntervalLevel.LEVEL_5MIN: - return pd_timestamp.floor('5min') - if self == IntervalLevel.LEVEL_15MIN: - return pd_timestamp.floor('15min') - if self == IntervalLevel.LEVEL_30MIN: - return pd_timestamp.floor('30min') - if self == IntervalLevel.LEVEL_1HOUR: - return pd_timestamp.floor('1h') - if self == IntervalLevel.LEVEL_4HOUR: - return pd_timestamp.floor('4h') - if self == IntervalLevel.LEVEL_1DAY: - return pd_timestamp.floor('1d') - - def to_minute(self): - return 
int(self.to_second() / 60) - - def to_second(self): - return int(self.to_ms() / 1000) - - def to_ms(self): - # we treat tick intervals is 5s, you could change it - if self == IntervalLevel.LEVEL_TICK: - return 5 * 1000 - if self == IntervalLevel.LEVEL_1MIN: - return 60 * 1000 - if self == IntervalLevel.LEVEL_5MIN: - return 5 * 60 * 1000 - if self == IntervalLevel.LEVEL_15MIN: - return 15 * 60 * 1000 - if self == IntervalLevel.LEVEL_30MIN: - return 30 * 60 * 1000 - if self == IntervalLevel.LEVEL_1HOUR: - return 60 * 60 * 1000 - if self == IntervalLevel.LEVEL_4HOUR: - return 4 * 60 * 60 * 1000 - if self == IntervalLevel.LEVEL_1DAY: - return 24 * 60 * 60 * 1000 - if self == IntervalLevel.LEVEL_1WEEK: - return 7 * 24 * 60 * 60 * 1000 - if self == IntervalLevel.LEVEL_1MON: - return 31 * 7 * 24 * 60 * 60 * 1000 - - def __ge__(self, other): - if self.__class__ is other.__class__: - return self.to_ms() >= other.to_ms() - return NotImplemented - - def __gt__(self, other): - - if self.__class__ is other.__class__: - return self.to_ms() > other.to_ms() - return NotImplemented - - def __le__(self, other): - if self.__class__ is other.__class__: - return self.to_ms() <= other.to_ms() - return NotImplemented - - def __lt__(self, other): - if self.__class__ is other.__class__: - return self.to_ms() < other.to_ms() - return NotImplemented - - -class AdjustType(enum.Enum): - # 这里用拼音,因为英文不直观 split-adjusted?wtf? - # 不复权 - bfq = 'bfq' - # 前复权 - qfq = 'qfq' - # 后复权 - hfq = 'hfq' - - -from . 
import zvt_context -from .schema import Mixin, NormalMixin, EntityMixin, NormalEntityMixin, PortfolioStock, Portfolio, PortfolioStockHistory - -__all__ = ['IntervalLevel', 'Mixin', 'NormalMixin', 'EntityMixin', 'NormalEntityMixin', 'zvt_context', 'AdjustType', - 'Portfolio', 'PortfolioStock', 'PortfolioStockHistory'] diff --git a/zvt/contract/api.py b/zvt/contract/api.py deleted file mode 100644 index 93bb3f2a..00000000 --- a/zvt/contract/api.py +++ /dev/null @@ -1,528 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import os -import platform -from typing import List, Union, Type - -import pandas as pd -from sqlalchemy import create_engine -from sqlalchemy import func, exists, and_ -from sqlalchemy.engine import Engine -from sqlalchemy.ext.declarative import DeclarativeMeta -from sqlalchemy.orm import Query -from sqlalchemy.orm import sessionmaker, Session - -from zvt import zvt_env -from zvt.contract import IntervalLevel, EntityMixin -from zvt.contract import Mixin -from zvt.contract import zvt_context -from zvt.utils.pd_utils import pd_is_not_null, index_df -from zvt.utils.time_utils import to_pd_timestamp - -logger = logging.getLogger(__name__) - - -def get_db_name(data_schema: DeclarativeMeta) -> str: - """ - get db name of the domain schema - - :param data_schema: - :type data_schema: - :return: - :rtype: - """ - for db_name, base in zvt_context.dbname_map_base.items(): - if issubclass(data_schema, base): - return db_name - - -def get_db_engine(provider: str, - db_name: str = None, - data_schema: object = None, - data_path: str = zvt_env['data_path']) -> Engine: - """ - get db engine of the (provider,db_name) or (provider,data_schema) - - - :param data_path: - :param provider: - :type provider: - :param db_name: - :type db_name: - :param data_schema: - :type data_schema: - :return: - :rtype: - """ - if data_schema: - db_name = get_db_name(data_schema=data_schema) - - db_path = os.path.join(data_path, '{}_{}.db?check_same_thread=False'.format(provider, 
db_name)) - - engine_key = '{}_{}'.format(provider, db_name) - db_engine = zvt_context.db_engine_map.get(engine_key) - if not db_engine: - db_engine = create_engine('sqlite:///' + db_path, echo=False) - zvt_context.db_engine_map[engine_key] = db_engine - return db_engine - - -def get_schemas(provider: str) -> List[DeclarativeMeta]: - """ - get domain schemas supported by the provider - - :param provider: - :type provider: - :return: - :rtype: - """ - schemas = [] - for provider1, dbs in zvt_context.provider_map_dbnames.items(): - if provider == provider1: - for dbname in dbs: - schemas1 = zvt_context.dbname_map_schemas.get(dbname) - if schemas1: - schemas += schemas1 - return schemas - - -def get_db_session(provider: str, - db_name: str = None, - data_schema: object = None, - force_new: bool = False) -> Session: - """ - get db session of the (provider,db_name) or (provider,data_schema) - - :param provider: - :type provider: - :param db_name: - :type db_name: - :param data_schema: - :type data_schema: - :param force_new: - :type force_new: - - :return: - :rtype: - """ - if data_schema: - db_name = get_db_name(data_schema=data_schema) - - session_key = '{}_{}'.format(provider, db_name) - - if force_new: - return get_db_session_factory(provider, db_name, data_schema)() - - session = zvt_context.sessions.get(session_key) - if not session: - session = get_db_session_factory(provider, db_name, data_schema)() - zvt_context.sessions[session_key] = session - return session - - -def get_db_session_factory(provider: str, - db_name: str = None, - data_schema: object = None): - """ - get db session factory of the (provider,db_name) or (provider,data_schema) - - :param provider: - :type provider: - :param db_name: - :type db_name: - :param data_schema: - :type data_schema: - :return: - :rtype: - """ - if data_schema: - db_name = get_db_name(data_schema=data_schema) - - session_key = '{}_{}'.format(provider, db_name) - session = zvt_context.db_session_map.get(session_key) - if 
not session: - session = sessionmaker() - zvt_context.db_session_map[session_key] = session - return session - - -def domain_name_to_table_name(domain_name: str) -> str: - parts = [] - part = '' - for c in domain_name: - if c.isupper() or c.isdigit(): - if part: - parts.append(part) - part = c.lower() - else: - part = part + c - - parts.append(part) - - if len(parts) > 1: - return '_'.join(parts) - elif parts: - return parts[0] - - -def table_name_to_domain_name(table_name: str) -> DeclarativeMeta: - """ - the rules for table_name -> domain_class - - :param table_name: - :type table_name: - :return: - :rtype: - """ - parts = table_name.split('_') - domain_name = '' - for part in parts: - domain_name = domain_name + part.capitalize() - return domain_name - - -def get_entity_schema(entity_type: str) -> object: - """ - get entity schema from name - - :param entity_type: - :type entity_type: - :return: - :rtype: - """ - return zvt_context.zvt_entity_schema_map[entity_type] - - -def get_schema_by_name(name: str) -> DeclarativeMeta: - """ - get domain schema by the name - - :param name: - :type name: - :return: - :rtype: - """ - for schema in zvt_context.schemas: - if schema.__name__ == name: - return schema - - -def get_schema_columns(schema: DeclarativeMeta) -> object: - """ - get all columns of the domain schema - - :param schema: - :type schema: - :return: - :rtype: - """ - return schema.__table__.columns.keys() - - -def common_filter(query: Query, - data_schema, - start_timestamp=None, - end_timestamp=None, - filters=None, - order=None, - limit=None, - time_field='timestamp'): - assert data_schema is not None - time_col = eval('data_schema.{}'.format(time_field)) - - if start_timestamp: - query = query.filter(time_col >= to_pd_timestamp(start_timestamp)) - if end_timestamp: - query = query.filter(time_col <= to_pd_timestamp(end_timestamp)) - - if filters: - for filter in filters: - query = query.filter(filter) - if order is not None: - query = query.order_by(order) 
- else: - query = query.order_by(time_col.asc()) - if limit: - query = query.limit(limit) - - return query - - -def del_data(data_schema: Type[Mixin], filters: List = None, provider=None): - if not provider: - provider = data_schema.providers[0] - - session = get_db_session(provider=provider, data_schema=data_schema) - query = session.query(data_schema) - if filters: - for f in filters: - query = query.filter(f) - query.delete() - session.commit() - - -def get_data(data_schema, - ids: List[str] = None, - entity_ids: List[str] = None, - entity_id: str = None, - codes: List[str] = None, - code: str = None, - level: Union[IntervalLevel, str] = None, - provider: str = None, - columns: List = None, - col_label: dict = None, - return_type: str = 'df', - start_timestamp: Union[pd.Timestamp, str] = None, - end_timestamp: Union[pd.Timestamp, str] = None, - filters: List = None, - session: Session = None, - order=None, - limit: int = None, - index: Union[str, list] = None, - time_field: str = 'timestamp'): - assert data_schema is not None - assert provider is not None - assert provider in zvt_context.providers - - if not session: - session = get_db_session(provider=provider, data_schema=data_schema) - - time_col = eval('data_schema.{}'.format(time_field)) - - if columns: - # support str - if type(columns[0]) == str: - columns_ = [] - for col in columns: - assert isinstance(col, str) - columns_.append(eval('data_schema.{}'.format(col))) - columns = columns_ - - # make sure get timestamp - if time_col not in columns: - columns.append(time_col) - - if col_label: - columns_ = [] - for col in columns: - if col.name in col_label: - columns_.append(col.label(col_label.get(col.name))) - else: - columns_.append(col) - columns = columns_ - - query = session.query(*columns) - else: - query = session.query(data_schema) - - if entity_id: - query = query.filter(data_schema.entity_id == entity_id) - if entity_ids: - query = query.filter(data_schema.entity_id.in_(entity_ids)) - if code: - 
query = query.filter(data_schema.code == code) - if codes: - query = query.filter(data_schema.code.in_(codes)) - if ids: - query = query.filter(data_schema.id.in_(ids)) - - # we always store different level in different schema,the level param is not useful now - if level: - try: - # some schema has no level,just ignore it - data_schema.level - if type(level) == IntervalLevel: - level = level.value - query = query.filter(data_schema.level == level) - except Exception as e: - pass - - query = common_filter(query, data_schema=data_schema, start_timestamp=start_timestamp, - end_timestamp=end_timestamp, filters=filters, order=order, limit=limit, - time_field=time_field) - - if return_type == 'df': - df = pd.read_sql(query.statement, query.session.bind) - if pd_is_not_null(df): - if index: - df = index_df(df, index=index, time_field=time_field) - return df - elif return_type == 'domain': - return query.all() - elif return_type == 'dict': - return [item.__dict__ for item in query.all()] - - -def data_exist(session, schema, id): - return session.query(exists().where(and_(schema.id == id))).scalar() - - -def get_data_count(data_schema, filters=None, session=None): - query = session.query(data_schema) - if filters: - for filter in filters: - query = query.filter(filter) - - count_q = query.statement.with_only_columns([func.count()]).order_by(None) - count = session.execute(count_q).scalar() - return count - - -def get_group(provider, data_schema, column, group_func=func.count, session=None): - if not session: - session = get_db_session(provider=provider, data_schema=data_schema) - if group_func: - query = session.query(column, group_func(column)).group_by(column) - else: - query = session.query(column).group_by(column) - df = pd.read_sql(query.statement, query.session.bind) - return df - - -def decode_entity_id(entity_id: str): - result = entity_id.split('_') - entity_type = result[0] - exchange = result[1] - code = ''.join(result[2:]) - return entity_type, exchange, code - 
- -def get_entity_type(entity_id: str): - entity_type, _, _ = decode_entity_id(entity_id) - return entity_type - - -def get_entity_exchange(entity_id: str): - _, exchange, _ = decode_entity_id(entity_id) - return exchange - - -def get_entity_code(entity_id: str): - _, _, code = decode_entity_id(entity_id) - return code - - -def df_to_db(df: pd.DataFrame, - data_schema: DeclarativeMeta, - provider: str, - force_update: bool = False, - sub_size: int = 5000, - drop_duplicates: bool = False) -> object: - """ - FIXME:improve - store the df to db - - :param df: - :param data_schema: - :param provider: - :param force_update: - :param sub_size: - :param drop_duplicates: - :return: - """ - if not pd_is_not_null(df): - return 0 - - if drop_duplicates and df.duplicated(subset='id').any(): - logger.warning(f'remove duplicated:{df[df.duplicated()]}') - df = df.drop_duplicates(subset='id', keep='last') - - db_engine = get_db_engine(provider, data_schema=data_schema) - - schema_cols = get_schema_columns(data_schema) - cols = set(df.columns.tolist()) & set(schema_cols) - - if not cols: - print('wrong cols') - return 0 - - df = df[cols] - - size = len(df) - - if platform.system() == "Windows": - sub_size = 900 - - if size >= sub_size: - step_size = int(size / sub_size) - if size % sub_size: - step_size = step_size + 1 - else: - step_size = 1 - - saved = 0 - - for step in range(step_size): - df_current = df.iloc[sub_size * step:sub_size * (step + 1)] - if force_update: - session = get_db_session(provider=provider, data_schema=data_schema) - ids = df_current["id"].tolist() - if len(ids) == 1: - sql = f'delete from {data_schema.__tablename__} where id = "{ids[0]}"' - else: - sql = f'delete from {data_schema.__tablename__} where id in {tuple(ids)}' - - session.execute(sql) - session.commit() - - else: - current = get_data(data_schema=data_schema, columns=[data_schema.id], provider=provider, - ids=df_current['id'].tolist()) - if pd_is_not_null(current): - df_current = 
df_current[~df_current['id'].isin(current['id'])] - - if pd_is_not_null(df_current): - saved = saved + len(df_current) - df_current.to_sql(data_schema.__tablename__, db_engine, index=False, if_exists='append') - - return saved - - -def get_entities( - entity_schema: EntityMixin = None, - entity_type: str = None, - exchanges: List[str] = None, - ids: List[str] = None, - entity_ids: List[str] = None, - entity_id: str = None, - codes: List[str] = None, - code: str = None, - provider: str = None, - columns: List = None, - col_label: dict = None, - return_type: str = 'df', - start_timestamp: Union[pd.Timestamp, str] = None, - end_timestamp: Union[pd.Timestamp, str] = None, - filters: List = None, - session: Session = None, - order=None, - limit: int = None, - index: Union[str, list] = 'code') -> object: - if not entity_schema: - entity_schema = zvt_context.entity_schema_map[entity_type] - - if not provider: - provider = entity_schema.providers[0] - - if not order: - order = entity_schema.code.asc() - - if exchanges: - if filters: - filters.append(entity_schema.exchange.in_(exchanges)) - else: - filters = [entity_schema.exchange.in_(exchanges)] - - return get_data(data_schema=entity_schema, ids=ids, entity_ids=entity_ids, entity_id=entity_id, codes=codes, - code=code, level=None, provider=provider, columns=columns, col_label=col_label, - return_type=return_type, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - filters=filters, session=session, order=order, limit=limit, index=index) - - -def get_entity_ids(entity_type='stock', entity_schema: EntityMixin = None, exchanges=None, codes=None, provider=None, - filters=None): - df = get_entities(entity_type=entity_type, entity_schema=entity_schema, exchanges=exchanges, codes=codes, - provider=provider, filters=filters) - if pd_is_not_null(df): - return df['entity_id'].to_list() - return None diff --git a/zvt/contract/consts.py b/zvt/contract/consts.py deleted file mode 100644 index fd9d6aca..00000000 --- 
a/zvt/contract/consts.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -from enum import Enum - - -class PlayerType(Enum): - # 个人 - individual = 'individual' - # 基金 - fund = 'fund' - # 社保 - social_security = 'social_security' - # 保险 - insurance = 'insurance' - # 外资 - qfii = 'qfii' - # 信托 - trust = 'trust' - # 券商 - broker = 'broker' - # 公司(包括私募) - corporation = 'corporation' - - -class EntityType(Enum): - stock = 'stock' - future = 'future' - coin = 'coin' - option = 'option' - fund = 'fund' - - -class Exchange(Enum): - # 上证交易所 - sh = 'sh' - # 深证交易所 - sz = 'sz' - - # 数字货币 - binance = 'binance' - huobipro = 'huobipro' - - # 上海期货交易所 - shfe = 'shfe' - # 大连商品交易所 - dce = "dce" - # 郑州商品交易所 - czce = 'czce' - # 中国金融期货交易所 - cffex = 'cffex' diff --git a/zvt/contract/drawer.py b/zvt/contract/drawer.py deleted file mode 100644 index edc8b9b7..00000000 --- a/zvt/contract/drawer.py +++ /dev/null @@ -1,554 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -from typing import List, Optional - -import numpy as np -import pandas as pd -import plotly.graph_objs as go -from plotly.subplots import make_subplots - -from zvt.contract.api import decode_entity_id -from zvt.contract.data_type import Bean -from zvt.contract.normal_data import NormalData -from zvt.utils.pd_utils import pd_is_not_null - -logger = logging.getLogger(__name__) - - -class Rect(Bean): - - def __init__(self, x0=None, y0=None, x1=None, y1=None) -> None: - # left-bottom - self.x0 = x0 - self.y0 = y0 - # right-top - self.x1 = x1 - self.y1 = y1 - - -class Draw(object): - def draw_kline(self, width=None, height=None, title=None, keep_ui_state=True, show=False, **kwargs): - return self._draw('kline', width=width, height=height, title=title, keep_ui_state=keep_ui_state, show=show, - **kwargs) - - def draw_line(self, width=None, height=None, title=None, keep_ui_state=True, show=False, **kwargs): - return self.draw_scatter(mode='lines', width=width, height=height, title=title, - keep_ui_state=keep_ui_state, 
show=show, **kwargs) - - def draw_area(self, width=None, height=None, title=None, keep_ui_state=True, show=False, **kwargs): - return self.draw_scatter(mode='none', width=width, height=height, title=title, - keep_ui_state=keep_ui_state, show=show, **kwargs) - - def draw_scatter(self, mode='markers', width=None, height=None, - title=None, keep_ui_state=True, show=False, **kwargs): - return self._draw('scatter', mode=mode, width=width, height=height, title=title, keep_ui_state=keep_ui_state, - show=show, **kwargs) - - def _draw(self, - main_chart='kline', - sub_chart='bar', - mode='lines', - width=None, - height=None, - title=None, - keep_ui_state=True, - show=False, - **kwargs): - - raise NotImplementedError() - - def default_layout(self, - width=None, - height=None, - title=None, - keep_ui_state=True, - **layout_params): - if keep_ui_state: - uirevision = True - else: - uirevision = None - - return dict(showlegend=True, - plot_bgcolor="#FFF", - hovermode="x", - hoverdistance=100, # Distance to show hover label of data point - spikedistance=1000, # Distance to show spike - uirevision=uirevision, - height=height, - width=width, - title=title, - yaxis=dict( - autorange=True, - fixedrange=False, - zeroline=False, - linecolor="#BCCCDC", - showgrid=False, - ), - xaxis=dict( - linecolor="#BCCCDC", - showgrid=False, - showspikes=True, # Show spike line for X-axis - # Format spike - spikethickness=2, - spikedash="dot", - spikecolor="#999999", - spikemode="across", - rangeselector=dict( - buttons=list([ - dict(count=1, - label="1m", - step="month", - stepmode="backward"), - dict(count=3, - label="3m", - step="month", - stepmode="backward"), - dict(count=6, - label="6m", - step="month", - stepmode="backward"), - dict(count=1, - label="YTD", - step="year", - stepmode="todate"), - dict(count=1, - label="1y", - step="year", - stepmode="backward"), - dict(step="all") - ]) - ), - rangeslider=dict( - visible=True, - ), - type="date" - ), - legend_orientation="h", - 
hoverlabel={"namelength": -1}, - **layout_params) - - -class Drawable(object): - - def drawer(self): - drawer = Drawer(main_df=self.drawer_main_df(), - main_data=self.drawer_main_data(), - factor_df_list=self.drawer_factor_df_list(), - factor_data_list=self.drawer_factor_data_list(), - sub_df_list=self.drawer_sub_df_list(), - sub_data_list=self.drawer_sub_data_list(), - sub_col_chart=self.drawer_sub_col_chart(), - annotation_df=self.drawer_annotation_df(), - rects=self.drawer_rects()) - return drawer - - def draw(self, main_chart='kline', width=None, height=None, title=None, keep_ui_state=True, show=False, **kwargs): - return self.drawer()._draw(main_chart=main_chart, width=width, height=height, title=title, - keep_ui_state=keep_ui_state, - show=show, - **kwargs) - - def drawer_main_df(self) -> Optional[pd.DataFrame]: - return None - - def drawer_main_data(self) -> Optional[NormalData]: - return None - - def drawer_factor_df_list(self) -> Optional[List[pd.DataFrame]]: - return None - - def drawer_factor_data_list(self) -> Optional[List[NormalData]]: - return None - - def drawer_sub_df_list(self) -> Optional[List[pd.DataFrame]]: - return None - - def drawer_sub_data_list(self) -> Optional[List[NormalData]]: - return None - - def drawer_annotation_df(self) -> Optional[pd.DataFrame]: - return None - - def drawer_rects(self) -> Optional[List[Rect]]: - return None - - def drawer_sub_col_chart(self) -> Optional[dict]: - return None - - -class StackedDrawer(Draw): - def __init__(self, *drawers) -> None: - super().__init__() - assert len(drawers) > 1 - self.drawers: List[Drawer] = drawers - - def make_y_layout(self, index, total, start_index=1, domain_range=(0, 1)): - part = (domain_range[1] - domain_range[0]) / total - - if index == 1: - yaxis = 'yaxis' - y = 'y' - else: - yaxis = f'yaxis{index}' - y = f'y{index}' - - return yaxis, y, dict(anchor="x", - autorange=True, - fixedrange=False, - zeroline=False, - linecolor="#BCCCDC", - showgrid=False, - domain=[domain_range[0] 
+ part * (index - start_index), - domain_range[0] + part * (index - start_index + 1)]) - - def _draw(self, - main_chart='kline', - sub_chart='bar', - mode='lines', - width=None, - height=None, - title=None, - keep_ui_state=True, - show=False, - **kwargs): - stacked_fig = go.Figure() - - total = len(self.drawers) - start = 1 - domain_range = (0, 1) - for drawer in self.drawers: - if drawer.has_sub_plot(): - domain_range = (0.2, 1) - start = 2 - break - for index, drawer in enumerate(self.drawers, start=start): - traces, sub_traces = drawer.make_traces(main_chart=main_chart, sub_chart=sub_chart, mode=mode, **kwargs) - - # fix sub traces as the bottom - if sub_traces: - yaxis, y, layout = self.make_y_layout(index=1, total=1, domain_range=(0, 0.2)) - # update sub_traces with yaxis - for trace in sub_traces: - trace.yaxis = y - stacked_fig.add_traces(sub_traces) - stacked_fig.layout[yaxis] = layout - - # make y layouts - yaxis, y, layout = self.make_y_layout(index=index, total=total, start_index=start, - domain_range=domain_range) - - stacked_fig.layout[yaxis] = layout - - # update traces with yaxis - for trace in traces: - trace.yaxis = y - stacked_fig.add_traces(traces) - - # update shapes with yaxis - if drawer.rects: - for rect in drawer.rects: - stacked_fig.add_shape(type="rect", - x0=rect.x0, y0=rect.y0, x1=rect.x1, y1=rect.y1, - line=dict( - color="RoyalBlue", - width=1), - # fillcolor="LightSkyBlue", - yref=y) - - # annotations - stacked_fig.layout['annotations'] = annotations(drawer.annotation_df, yref=y) - - stacked_fig.update_layout( - self.default_layout(width=width, height=height, title=title, keep_ui_state=keep_ui_state)) - - if show: - stacked_fig.show() - else: - return stacked_fig - - -class Drawer(Draw): - def __init__(self, - main_df: pd.DataFrame = None, - factor_df_list: List[pd.DataFrame] = None, - sub_df_list: pd.DataFrame = None, - main_data: NormalData = None, - factor_data_list: List[NormalData] = None, - sub_data_list: NormalData = None, - 
sub_col_chart: Optional[dict] = None, - rects: List[Rect] = None, - annotation_df: pd.DataFrame = None) -> None: - """ - - :param main_df: df for main chart - :param factor_df_list: list of factor df on main chart - :param sub_df_list: df for sub chart under main chart - :param main_data: NormalData wrap main_df,use either - :param factor_data_list: list of NormalData wrap factor_df,use either - :param sub_data_list: NormalData wrap sub_df,use either - :param annotation_df: - """ - - # 主图数据 - if main_data is None: - main_data = NormalData(main_df) - self.main_data: NormalData = main_data - - # 主图因子 - if not factor_data_list and factor_df_list: - factor_data_list = [] - for df in factor_df_list: - factor_data_list.append(NormalData(df)) - # 每一个df可能有多个column, 代表多个指标,对于连续型的,可以放在一个df里面 - # 对于离散型的,比如一些特定模式的连线,放在多个df里面较好,因为index不同 - self.factor_data_list: List[NormalData] = factor_data_list - - # 副图数据 - if not sub_data_list and sub_df_list: - sub_data_list = [] - for df in sub_df_list: - sub_data_list.append(NormalData(df)) - # 每一个df可能有多个column, 代表多个指标,对于连续型的,可以放在一个df里面 - # 对于离散型的,比如一些特定模式的连线,放在多个df里面较好,因为index不同 - self.sub_data_list: List[NormalData] = sub_data_list - - # 幅图col对应的图形,line or bar - self.sub_col_chart = sub_col_chart - - # 主图的标记数据 - self.annotation_df = annotation_df - - # list of rect - self.rects = rects - - def add_factor_df(self, df: pd.DataFrame): - self.add_factor_data(NormalData(df)) - - def add_factor_data(self, data: NormalData): - if not self.factor_data_list: - self.factor_data_list = [] - self.factor_data_list.append(data) - - def add_sub_df(self, df: pd.DataFrame): - self.add_sub_data(NormalData(df)) - - def add_sub_data(self, data: NormalData): - if not self.sub_data_list: - self.sub_data_list = [] - self.sub_data_list.append(data) - - def has_sub_plot(self): - return self.sub_data_list is not None and not self.sub_data_list[0].empty() - - def make_traces(self, - main_chart='kline', - sub_chart='bar', - mode='lines', - yaxis='y', - **kwargs): 
- traces = [] - sub_traces = [] - - for entity_id, df in self.main_data.entity_map_df.items(): - df = df.select_dtypes(np.number) - code = entity_id - try: - _, _, code = decode_entity_id(entity_id) - except Exception: - pass - - # 构造主图 - if main_chart == 'kline': - trace_name = '{}_kdata'.format(code) - trace = go.Candlestick(x=df.index, open=df['open'], close=df['close'], low=df['low'], high=df['high'], - name=trace_name, yaxis=yaxis, **kwargs) - traces.append(trace) - elif main_chart == 'scatter': - for col in df.columns: - trace_name = '{}_{}'.format(code, col) - ydata = df[col].values.tolist() - traces.append(go.Scatter(x=df.index, y=ydata, mode=mode, name=trace_name, yaxis=yaxis, **kwargs)) - - # 构造主图指标 - if self.factor_data_list: - for factor_data in self.factor_data_list: - if not factor_data.empty(): - factor_df = factor_data.entity_map_df.get(entity_id) - factor_df = factor_df.select_dtypes(np.number) - if pd_is_not_null(factor_df): - for col in factor_df.columns: - trace_name = '{}_{}'.format(code, col) - ydata = factor_df[col].values.tolist() - - line = go.Scatter(x=factor_df.index, y=ydata, mode=mode, name=trace_name, yaxis=yaxis, - **kwargs) - traces.append(line) - - # 构造幅图 - if self.has_sub_plot(): - for sub_data in self.sub_data_list: - sub_df = sub_data.entity_map_df.get(entity_id) - if pd_is_not_null(sub_df): - sub_df = sub_df.select_dtypes(np.number) - for col in sub_df.columns: - trace_name = '{}_{}'.format(code, col) - ydata = sub_df[col].values.tolist() - - def color(i): - if i > 0: - return 'red' - else: - return 'green' - - colors = [color(i) for i in ydata] - - the_sub_chart = None - if self.sub_col_chart is not None: - the_sub_chart = self.sub_col_chart.get(col) - if not the_sub_chart: - the_sub_chart = sub_chart - - if the_sub_chart == 'line': - sub_trace = go.Scatter(x=sub_df.index, y=ydata, name=trace_name, yaxis='y2', - marker=dict(color=colors)) - else: - sub_trace = go.Bar(x=sub_df.index, y=ydata, name=trace_name, yaxis='y2', - 
marker=dict(color=colors)) - sub_traces.append(sub_trace) - - return traces, sub_traces - - def add_rects(self, fig, yaxis='y'): - if self.rects: - for rect in self.rects: - fig.add_shape(type="rect", - x0=rect.x0, y0=rect.y0, x1=rect.x1, y1=rect.y1, - line=dict(color="RoyalBlue", - width=1), - # fillcolor="LightSkyBlue" - ) - fig.update_shapes(dict(xref='x', yref=yaxis)) - - def _draw(self, - main_chart='kline', - sub_chart='bar', - mode='lines', - width=None, - height=None, - title=None, - keep_ui_state=True, - show=False, - **kwargs): - yaxis = 'y' - traces, sub_traces = self.make_traces(main_chart=main_chart, sub_chart=sub_chart, mode=mode, yaxis=yaxis, - **kwargs) - - if sub_traces: - fig = make_subplots(rows=2, cols=1, row_heights=[0.8, 0.2], vertical_spacing=0.08, shared_xaxes=True) - fig.add_traces(traces, rows=[1] * len(traces), cols=[1] * len(traces)) - fig.add_traces(sub_traces, rows=[2] * len(sub_traces), cols=[1] * len(sub_traces)) - else: - fig = go.Figure() - fig.add_traces(traces) - - # 绘制矩形 - self.add_rects(fig, yaxis=yaxis) - - fig.update_layout(self.default_layout(width=width, height=height, title=title, keep_ui_state=keep_ui_state)) - - if sub_traces: - fig.update_layout(xaxis_rangeslider_visible=False) - fig.update_layout(xaxis2_rangeslider_visible=True, xaxis2_rangeslider_thickness=0.1) - # 绘制标志 - fig.layout['annotations'] = annotations(self.annotation_df, yref=yaxis) - - if show: - fig.show() - else: - return fig - - def draw_table(self, width=None, height=None, title=None, keep_ui_state=True, **kwargs): - cols = self.main_data.data_df.index.names + self.main_data.data_df.columns.tolist() - - index1 = self.main_data.data_df.index.get_level_values(0).tolist() - index2 = self.main_data.data_df.index.get_level_values(1).tolist() - values = [index1] + [index2] + [self.main_data.data_df[col] for col in self.main_data.data_df.columns] - - data = go.Table( - header=dict(values=cols, - fill_color=['#000080', '#000080'] + ['#0066cc'] * 
len(self.main_data.data_df.columns), - align='left', - font=dict(color='white', size=13)), - cells=dict(values=values, fill=dict(color='#F5F8FF'), align='left'), **kwargs) - - fig = go.Figure() - fig.add_traces([data]) - fig.update_layout(self.default_layout(width=width, height=height, title=title, keep_ui_state=keep_ui_state)) - - fig.show() - - -def annotations(annotation_df: pd.DataFrame, yref='y'): - """ - annotation_df format: - value flag color - entity_id timestamp - - :param annotation_df: - :param yref: specific yaxis e.g, y,y2,y3 - :return: - - """ - - if pd_is_not_null(annotation_df): - annotations = [] - for trace_name, df in annotation_df.groupby(level=0): - if pd_is_not_null(df): - for (_, timestamp), item in df.iterrows(): - if 'color' in item: - color = item['color'] - else: - color = '#ec0000' - - value = round(item['value'], 2) - annotations.append(dict( - x=timestamp, - y=value, - xref='x', - yref=yref, - text=item['flag'], - showarrow=True, - align='center', - arrowhead=2, - arrowsize=1, - arrowwidth=2, - # arrowcolor='#030813', - ax=-10, - ay=-30, - bordercolor='#c7c7c7', - borderwidth=1, - bgcolor=color, - opacity=0.8 - )) - return annotations - return None - - -def distribute(df, col): - import plotly.express as px - fig = px.histogram(df, x=col) - fig.show() - -if __name__ == '__main__': - from zvt.factors.zen import ZenFactor - - data_reader1 = ZenFactor(codes=['000338'], level='1d') - data_reader2 = ZenFactor(codes=['000338'], level='1wk') - print(data_reader2.data_df) - - stacked = StackedDrawer(data_reader1.drawer(), data_reader2.drawer()).draw_kline(show=True) - # df = Stock1dHfqKdata.query_data(code='000338', start_timestamp='2015-01-01') - # sub_df = FinanceFactor.query_data(code='000338', start_timestamp='2015-01-01', - # columns=[FinanceFactor.roe, FinanceFactor.entity_id, FinanceFactor.timestamp]) - # - # Drawer(main_df=df, sub_df_list=[sub_df]).draw_kline(show=True) diff --git a/zvt/contract/factor.py b/zvt/contract/factor.py 
deleted file mode 100644 index 45a009db..00000000 --- a/zvt/contract/factor.py +++ /dev/null @@ -1,552 +0,0 @@ -# -*- coding: utf-8 -*- -import enum -import json -import logging -import time -from typing import List, Union, Optional, Type - -import pandas as pd -from sqlalchemy import Column, String, Text -from sqlalchemy.ext.declarative import declarative_base - -from zvt.contract import IntervalLevel, EntityMixin -from zvt.contract import Mixin -from zvt.contract.api import get_data, df_to_db, get_db_session, del_data -from zvt.contract.reader import DataReader, DataListener -from zvt.contract.register import register_schema -from zvt.contract.zvt_context import factor_cls_registry -from zvt.utils.pd_utils import pd_is_not_null - - -class Indicator(object): - def __init__(self) -> None: - self.logger = logging.getLogger(self.__class__.__name__) - self.indicators = [] - - -class Transformer(Indicator): - - def __init__(self) -> None: - super().__init__() - - def transform(self, input_df: pd.DataFrame) -> pd.DataFrame: - """ - input_df format: - - col1 col2 col3 ... - entity_id timestamp - 1.2 0.5 0.3 ... - 1.0 0.7 0.2 ... - - the return result would change the columns and keep the format - - :param input_df: - :return: - """ - g = input_df.groupby(level=0) - if len(g.groups) == 1: - entity_id = input_df.index[0][0] - - df = input_df.reset_index(level=0, drop=True) - ret_df = self.transform_one(entity_id=entity_id, df=df) - ret_df['entity_id'] = entity_id - - return ret_df.set_index('entity_id', append=True).swaplevel(0, 1) - else: - return g.apply(lambda x: self.transform_one(x.index[0][0], x.reset_index(level=0, drop=True))) - - def transform_one(self, entity_id, df: pd.DataFrame) -> pd.DataFrame: - """ - df format: - - col1 col2 col3 ... - timestamp - 1.2 0.5 0.3 ... - 1.0 0.7 0.2 ... 
- - the return result would change the columns and keep the format - - :param df: - :return: - """ - return df - - -class Accumulator(Indicator): - - def __init__(self, acc_window: int = 1) -> None: - """ - - :param acc_window: the window size of acc for computing,default is 1 - """ - super().__init__() - self.acc_window = acc_window - - def acc(self, input_df: pd.DataFrame, acc_df: pd.DataFrame, states: dict) -> (pd.DataFrame, dict): - """ - - :param input_df: new input - :param acc_df: previous result - :param states: current states of the entity - :return: new result and states - """ - g = input_df.groupby(level=0) - if len(g.groups) == 1: - entity_id = input_df.index[0][0] - - df = input_df.reset_index(level=0, drop=True) - if pd_is_not_null(acc_df) and (entity_id == acc_df.index[0][0]): - acc_one_df = acc_df.reset_index(level=0, drop=True) - else: - acc_one_df = None - ret_df, state = self.acc_one(entity_id=entity_id, df=df, acc_df=acc_one_df, state=states.get(entity_id)) - if pd_is_not_null(ret_df): - ret_df['entity_id'] = entity_id - ret_df = ret_df.set_index('entity_id', append=True).swaplevel(0, 1) - ret_df['entity_id'] = entity_id - return ret_df, {entity_id: state} - return None, {entity_id: state} - else: - new_states = {} - - def cal_acc(x): - entity_id = x.index[0][0] - if pd_is_not_null(acc_df): - acc_g = acc_df.groupby(level=0) - acc_one_df = None - if entity_id in acc_g.groups: - acc_one_df = acc_g.get_group(entity_id) - if pd_is_not_null(acc_one_df): - acc_one_df = acc_one_df.reset_index(level=0, drop=True) - else: - acc_one_df = None - - one_result, state = self.acc_one(entity_id=entity_id, - df=x.reset_index(level=0, drop=True), - acc_df=acc_one_df, - state=states.get(x.index[0][0])) - - new_states[entity_id] = state - return one_result - - ret_df = g.apply(lambda x: cal_acc(x)) - return ret_df, new_states - - def acc_one(self, entity_id, df: pd.DataFrame, acc_df: pd.DataFrame, state: dict) -> (pd.DataFrame, dict): - """ - df format: - - col1 
col2 col3 ... - timestamp - 1.2 0.5 0.3 ... - 1.0 0.7 0.2 ... - - the new result and state - - :param df: current input df - :param entity_id: current computing entity_id - :param acc_df: current result of the entity_id - :param state: current state of the entity_id - :return: new result and state of the entity_id - """ - return acc_df, state - - -class Scorer(object): - def __init__(self) -> None: - self.logger = logging.getLogger(self.__class__.__name__) - - def score(self, input_df: pd.DataFrame) -> pd.DataFrame: - """ - - :param input_df: current input df - :return: df with normal score - """ - return input_df - - -class FactorType(enum.Enum): - filter = 'filter' - score = 'score' - - -def register_class(target_class): - if target_class.__name__ not in ('Factor', 'FilterFactor', 'ScoreFactor', 'StateFactor'): - factor_cls_registry[target_class.__name__] = target_class - - -class FactorMeta(type): - def __new__(meta, name, bases, class_dict): - cls = type.__new__(meta, name, bases, class_dict) - register_class(cls) - return cls - - -FactorBase = declarative_base() - - -# 用于保存factor的状态 -class FactorState(FactorBase, Mixin): - __tablename__ = 'factor_state' - # 因子名字 - factor_name = Column(String(length=128)) - - # json string - state = Column(Text()) - - -register_schema(providers=['zvt'], db_name='factor_info', schema_base=FactorBase) - - -class Factor(DataReader, DataListener): - factor_type: FactorType = None - - # define the schema for persist,its columns should be same as indicators in transformer or accumulator - factor_schema: Type[Mixin] = None - - transformer: Transformer = None - accumulator: Accumulator = None - - def __init__(self, - data_schema: Type[Mixin], - entity_schema: Type[EntityMixin] = None, - provider: str = None, - entity_provider: str = None, - entity_ids: List[str] = None, - exchanges: List[str] = None, - codes: List[str] = None, - the_timestamp: Union[str, pd.Timestamp] = None, - start_timestamp: Union[str, pd.Timestamp] = None, - 
end_timestamp: Union[str, pd.Timestamp] = None, - columns: List = None, - filters: List = None, - order: object = None, - limit: int = None, - level: Union[str, IntervalLevel] = None, - category_field: str = 'entity_id', - time_field: str = 'timestamp', - computing_window: int = None, - # child added arguments - keep_all_timestamp: bool = False, - fill_method: str = 'ffill', - effective_number: int = None, - transformer: Transformer = None, - accumulator: Accumulator = None, - need_persist: bool = False, - dry_run: bool = False, - factor_name: str = None, - clear_state: bool = False, - not_load_data: bool = False) -> None: - """ - - :param computing_window: the window size for computing factor - :param keep_all_timestamp: whether fill all timestamp gap,default False - :param fill_method: - :param effective_number: - :param transformer: - :param accumulator: - :param need_persist: whether persist factor - :param dry_run: True for just computing factor, False for backtesting - """ - - self.not_load_data = not_load_data - - super().__init__(data_schema, entity_schema, provider, entity_provider, entity_ids, exchanges, codes, - the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, level, - category_field, time_field, computing_window) - - # define unique name of your factor if you want to keep factor state - # the factor state is defined by factor_name and entity_id - if not factor_name: - self.factor_name = type(self).__name__.lower() - else: - self.factor_name = factor_name - - self.clear_state = clear_state - - self.keep_all_timestamp = keep_all_timestamp - self.fill_method = fill_method - self.effective_number = effective_number - - if transformer: - self.transformer = transformer - else: - self.transformer = self.__class__.transformer - - if accumulator: - self.accumulator = accumulator - else: - self.accumulator = self.__class__.accumulator - - self.need_persist = need_persist - self.dry_run = dry_run - - # 中间结果,不持久化 - # data_df->pipe_df - 
self.pipe_df: pd.DataFrame = None - - # 计算因子的结果,可持久化,通过对pipe_df的计算得到 - # pipe_df->factor_df - self.factor_df: pd.DataFrame = None - - # result_df是用于选股的标准df,通过对factor_df的计算得到 - # factor_df->result_df - self.result_df: pd.DataFrame = None - - # entity_id:state - self.states: dict = {} - - if self.clear_state: - self.clear_state_data() - elif self.need_persist: - self.load_factor() - - # 根据已经计算的factor_df和computing_window来保留data_df - # 因为读取data_df的目的是为了计算factor_df,选股和回测只依赖factor_df - # 所以如果有持久化的factor_df,只需保留需要用于计算的data_df即可 - if pd_is_not_null(self.data_df) and self.computing_window: - dfs = [] - for entity_id, df in self.data_df.groupby(level=0): - latest_laved = get_data(provider='zvt', - data_schema=self.factor_schema, - entity_id=entity_id, - order=self.factor_schema.timestamp.desc(), - limit=1, - index=[self.category_field, self.time_field], return_type='domain') - if latest_laved: - df1 = df[df.timestamp < latest_laved[0].timestamp].iloc[-self.computing_window:] - if pd_is_not_null(df1): - df = df[df.timestamp >= df1.iloc[0].timestamp] - dfs.append(df) - - self.data_df = pd.concat(dfs) - - self.register_data_listener(self) - - # the compute logic is not triggered from load data - # for the case:1)load factor from db 2)compute the result - if self.not_load_data: - self.compute() - - def load_data(self): - if self.not_load_data: - return - super().load_data() - - def load_factor(self): - # read state - states: List[FactorState] = FactorState.query_data(filters=[FactorState.factor_name == self.factor_name], - entity_ids=self.entity_ids, - return_type='domain') - if states: - for state in states: - self.states[state.entity_id] = self.decode_state(state.state) - - if self.dry_run: - # 如果只是为了计算因子,只需要读取acc_window的factor_df - if self.accumulator is not None: - self.factor_df = self.load_window_df(provider='zvt', data_schema=self.factor_schema, - window=self.accumulator.acc_window) - else: - self.factor_df = get_data(provider='zvt', - data_schema=self.factor_schema, - 
start_timestamp=self.start_timestamp, - entity_ids=self.entity_ids, - end_timestamp=self.end_timestamp, - index=[self.category_field, self.time_field]) - - col_map_object_hook = self.factor_col_map_object_hook() - if pd_is_not_null(self.factor_df) and col_map_object_hook: - for col in col_map_object_hook: - if col in self.factor_df.columns: - self.factor_df[col] = self.factor_df[col].apply( - lambda x: json.loads(x, object_hook=col_map_object_hook.get(col))) - - def factor_col_map_object_hook(self) -> dict: - """ - - :return:{col:object_hook} - """ - return {} - - def factor_state_object_hook(self): - return None - - def clear_state_data(self, entity_id=None): - if entity_id: - del_data(FactorState, - filters=[FactorState.factor_name == self.factor_name, FactorState.entity_id == entity_id], - provider='zvt') - del_data(self.factor_schema, filters=[self.factor_schema.entity_id == entity_id], provider='zvt') - else: - del_data(FactorState, filters=[FactorState.factor_name == self.factor_name], provider='zvt') - del_data(self.factor_schema, provider='zvt') - - def decode_state(self, state: str): - return json.loads(state, object_hook=self.factor_state_object_hook()) - - def encode_state(self, state: object): - return json.dumps(state, cls=self.factor_encoder()) - - def factor_encoder(self): - return None - - def pre_compute(self): - if not self.not_load_data and not pd_is_not_null(self.pipe_df): - self.pipe_df = self.data_df - - def do_compute(self): - self.logger.info('compute factor start') - self.compute_factor() - self.logger.info('compute factor finish') - - self.logger.info('compute result start') - self.compute_result() - self.logger.info('compute result finish') - - def compute_factor(self): - if self.not_load_data: - return - # 无状态的转换运算 - if pd_is_not_null(self.data_df) and self.transformer: - self.pipe_df = self.transformer.transform(self.data_df) - else: - self.pipe_df = self.data_df - - # 有状态的累加运算 - if pd_is_not_null(self.pipe_df) and self.accumulator: - 
self.factor_df, self.states = self.accumulator.acc(self.pipe_df, self.factor_df, self.states) - else: - self.factor_df = self.pipe_df - - def compute_result(self): - pass - - def after_compute(self): - if self.not_load_data: - return - if self.keep_all_timestamp: - self.fill_gap() - - if self.need_persist and pd_is_not_null(self.factor_df): - self.persist_factor() - - def compute(self): - self.pre_compute() - - self.logger.info(f'[[[ ~~~~~~~~factor:{self.factor_name} ~~~~~~~~]]]') - self.logger.info('do_compute start') - start_time = time.time() - self.do_compute() - cost_time = time.time() - start_time - self.logger.info('do_compute finished,cost_time:{}s'.format(cost_time)) - - self.logger.info('after_compute start') - start_time = time.time() - self.after_compute() - cost_time = time.time() - start_time - self.logger.info('after_compute finished,cost_time:{}s'.format(cost_time)) - self.logger.info(f'[[[ ^^^^^^^^factor:{self.factor_name} ^^^^^^^^]]]') - - def drawer_main_df(self) -> Optional[pd.DataFrame]: - return self.data_df - - def drawer_factor_df_list(self) -> Optional[List[pd.DataFrame]]: - if (self.transformer is not None or self.accumulator is not None) and pd_is_not_null(self.factor_df): - return [self.factor_df] - return None - - def drawer_sub_df_list(self) -> Optional[List[pd.DataFrame]]: - if (self.transformer is not None or self.accumulator is not None) and pd_is_not_null(self.result_df): - return [self.result_df] - return None - - def fill_gap(self): - # 该操作较慢,只适合做基本面的运算 - idx = pd.date_range(self.start_timestamp, self.end_timestamp) - new_index = pd.MultiIndex.from_product([self.result_df.index.levels[0], idx], - names=[self.category_field, self.time_field]) - self.result_df = self.result_df.loc[~self.result_df.index.duplicated(keep='first')] - self.result_df = self.result_df.reindex(new_index) - self.result_df = self.result_df.groupby(level=0).fillna(method=self.fill_method, limit=self.effective_number) - - def on_data_loaded(self, data: 
pd.DataFrame): - self.compute() - - def on_data_changed(self, data: pd.DataFrame): - """ - overwrite it for computing after data added - - Parameters - ---------- - data : - """ - self.compute() - - def on_entity_data_changed(self, entity, added_data: pd.DataFrame): - """ - overwrite it for computing after entity data added - - Parameters - ---------- - entity : - added_data : - """ - pass - - def persist_factor(self): - df = self.factor_df.copy() - # encode json columns - if pd_is_not_null(df) and self.factor_col_map_object_hook(): - for col in self.factor_col_map_object_hook(): - if col in df.columns: - df[col] = df[col].apply(lambda x: json.dumps(x, cls=self.factor_encoder())) - - if self.states: - session = get_db_session(provider='zvt', data_schema=FactorState) - g = df.groupby(level=0) - - for entity_id in self.states: - state = self.states[entity_id] - try: - if state: - domain_id = f'{self.factor_name}_{entity_id}' - factor_state: FactorState = session.query(FactorState).get(domain_id) - state_str = self.encode_state(state) - if factor_state: - factor_state.state = state_str - else: - factor_state = FactorState(id=domain_id, entity_id=entity_id, - factor_name=self.factor_name, - state=state_str) - session.add(factor_state) - session.commit() - if entity_id in g.groups: - df_to_db(df=df.loc[(entity_id,)], data_schema=self.factor_schema, provider='zvt', - force_update=False) - except Exception as e: - self.logger.error(f'{self.factor_name} {entity_id} save state error') - self.logger.exception(e) - # clear them if error happen - self.clear_state_data(entity_id) - else: - df_to_db(df=df, data_schema=self.factor_schema, provider='zvt', force_update=False) - - -class FilterFactor(Factor): - factor_type = FactorType.filter - - -class ScoreFactor(Factor): - factor_type = FactorType.score - scorer: Scorer = None - - def compute_result(self): - super().compute_result() - if pd_is_not_null(self.factor_df) and self.scorer: - self.result_df = 
self.scorer.score(self.factor_df) - - -# the __all__ is generated -__all__ = ['Indicator', 'Transformer', 'Accumulator', 'Scorer', 'FactorType', 'Factor', 'FilterFactor', 'ScoreFactor', - 'FactorMeta'] diff --git a/zvt/contract/reader.py b/zvt/contract/reader.py deleted file mode 100644 index 7e4022c6..00000000 --- a/zvt/contract/reader.py +++ /dev/null @@ -1,293 +0,0 @@ -# -*- coding: utf-8 -*- -import json -import logging -import time -from typing import List, Union, Type, Optional - -import pandas as pd - -from zvt.contract import IntervalLevel, Mixin, EntityMixin -from zvt.contract.api import get_entities -from zvt.contract.drawer import Drawable -from zvt.utils.pd_utils import pd_is_not_null -from zvt.utils.time_utils import to_pd_timestamp, now_pd_timestamp - - -class DataListener(object): - def on_data_loaded(self, data: pd.DataFrame) -> object: - """ - - Parameters - ---------- - data : the data loaded at first time - """ - raise NotImplementedError - - def on_data_changed(self, data: pd.DataFrame) -> object: - """ - - Parameters - ---------- - data : the data added - """ - raise NotImplementedError - - def on_entity_data_changed(self, entity: str, added_data: pd.DataFrame) -> object: - """ - - Parameters - ---------- - entity : the entity - added_data : the data added for the entity - """ - pass - - -class DataReader(Drawable): - logger = logging.getLogger(__name__) - - def __init__(self, - data_schema: Type[Mixin], - entity_schema: Type[EntityMixin], - provider: str = None, - entity_provider: str = None, - entity_ids: List[str] = None, - exchanges: List[str] = None, - codes: List[str] = None, - the_timestamp: Union[str, pd.Timestamp] = None, - start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = now_pd_timestamp(), - columns: List = None, - filters: List = None, - order: object = None, - limit: int = None, - level: IntervalLevel = None, - category_field: str = 'entity_id', - time_field: str = 'timestamp', - 
computing_window: int = None) -> None: - super().__init__() - self.logger = logging.getLogger(self.__class__.__name__) - - self.data_schema = data_schema - self.entity_schema = entity_schema - - self.provider = provider - self.entity_provider = entity_provider - - self.the_timestamp = the_timestamp - if the_timestamp: - self.start_timestamp = the_timestamp - self.end_timestamp = the_timestamp - else: - self.start_timestamp = start_timestamp - self.end_timestamp = end_timestamp - - self.start_timestamp = to_pd_timestamp(self.start_timestamp) - self.end_timestamp = to_pd_timestamp(self.end_timestamp) - - self.exchanges = exchanges - - if codes: - if type(codes) == str: - codes = codes.replace(' ', '') - if codes.startswith('[') and codes.endswith(']'): - codes = json.loads(codes) - else: - codes = codes.split(',') - - self.codes = codes - self.entity_ids = entity_ids - - # 转换成标准entity_id - if entity_schema and not self.entity_ids: - df = get_entities(entity_schema=entity_schema, provider=self.entity_provider, - exchanges=self.exchanges, codes=self.codes) - if pd_is_not_null(df): - self.entity_ids = df['entity_id'].to_list() - - self.filters = filters - self.order = order - self.limit = limit - - if level: - self.level = IntervalLevel(level) - else: - self.level = level - - self.category_field = category_field - self.time_field = time_field - self.computing_window = computing_window - - self.category_col = eval('self.data_schema.{}'.format(self.category_field)) - self.time_col = eval('self.data_schema.{}'.format(self.time_field)) - - self.columns = columns - - # we store the data in a multiple index(category_column,timestamp) Dataframe - if self.columns: - # support str - if type(columns[0]) == str: - self.columns = [] - for col in columns: - self.columns.append(eval('data_schema.{}'.format(col))) - - # always add category_column and time_field for normalizing - self.columns = list(set(self.columns) | {self.category_col, self.time_col}) - - self.data_listeners: 
List[DataListener] = [] - - self.data_df: pd.DataFrame = None - - self.load_data() - - def load_window_df(self, provider, data_schema, window): - window_df = None - - dfs = [] - for entity_id in self.entity_ids: - df = data_schema.query_data(provider=provider, - index=[self.category_field, self.time_field], - order=data_schema.timestamp.desc(), - entity_id=entity_id, - limit=window) - if pd_is_not_null(df): - dfs.append(df) - if dfs: - window_df = pd.concat(dfs) - window_df = window_df.sort_index(level=[0, 1]) - return window_df - - def load_data(self): - self.logger.info('load_data start') - start_time = time.time() - params = dict(entity_ids=self.entity_ids, provider=self.provider, - columns=self.columns, start_timestamp=self.start_timestamp, - end_timestamp=self.end_timestamp, filters=self.filters, - order=self.order, limit=self.limit, level=self.level, - index=[self.category_field, self.time_field], - time_field=self.time_field) - self.logger.info(f'query_data params:{params}') - - self.data_df = self.data_schema.query_data(entity_ids=self.entity_ids, provider=self.provider, - columns=self.columns, start_timestamp=self.start_timestamp, - end_timestamp=self.end_timestamp, filters=self.filters, - order=self.order, limit=self.limit, level=self.level, - index=[self.category_field, self.time_field], - time_field=self.time_field) - - cost_time = time.time() - start_time - self.logger.info('load_data finished, cost_time:{}'.format(cost_time)) - - for listener in self.data_listeners: - listener.on_data_loaded(self.data_df) - - def move_on(self, to_timestamp: Union[str, pd.Timestamp] = None, - timeout: int = 20) -> object: - """ - using continual fetching data in realtime - 1)get the data happened before to_timestamp,if not set,get all the data which means to now - 2)if computing_window set,the data_df would be cut for saving memory - - - :param to_timestamp: - :type to_timestamp: - :param timeout: - :type timeout: int - :return: - :rtype: - """ - if not 
pd_is_not_null(self.data_df): - self.load_data() - return - - start_time = time.time() - - # FIXME:we suppose history data should be there at first - has_got = [] - dfs = [] - changed = False - while True: - for entity_id, df in self.data_df.groupby(level=0): - if entity_id in has_got: - continue - - recorded_timestamp = df.index.levels[1].max() - - # move_on读取数据,表明之前的数据已经处理完毕,只需要保留computing_window的数据 - if self.computing_window: - df = df.iloc[-self.computing_window:] - - added_filter = [self.category_col == entity_id, self.time_col > recorded_timestamp] - if self.filters: - filters = self.filters + added_filter - else: - filters = added_filter - - added_df = self.data_schema.query_data(provider=self.provider, - columns=self.columns, - end_timestamp=to_timestamp, filters=filters, level=self.level, - index=[self.category_field, self.time_field]) - - if pd_is_not_null(added_df): - self.logger.info(f'got new data:{df.to_json(orient="records", force_ascii=False)}') - - for listener in self.data_listeners: - listener.on_entity_data_changed(entity=entity_id, added_data=added_df) - # if got data,just move to another entity_id - changed = True - has_got.append(entity_id) - df = df.append(added_df, sort=False) - dfs.append(df) - else: - cost_time = time.time() - start_time - if cost_time > timeout: - # if timeout,just add the old data - has_got.append(entity_id) - dfs.append(df) - self.logger.warning( - 'category:{} level:{} getting data timeout,to_timestamp:{},now:{}'.format(entity_id, - self.level, - to_timestamp, - now_pd_timestamp())) - continue - - if len(has_got) == len(self.data_df.index.levels[0]): - break - - if dfs: - self.data_df = pd.concat(dfs, sort=False) - self.data_df.sort_index(level=[0, 1], inplace=True) - - if changed: - for listener in self.data_listeners: - listener.on_data_changed(self.data_df) - - def register_data_listener(self, listener): - if listener not in self.data_listeners: - self.data_listeners.append(listener) - - # notify it once after 
registered - if pd_is_not_null(self.data_df): - listener.on_data_loaded(self.data_df) - - def deregister_data_listener(self, listener): - if listener in self.data_listeners: - self.data_listeners.remove(listener) - - def empty(self): - return not pd_is_not_null(self.data_df) - - def drawer_main_df(self) -> Optional[pd.DataFrame]: - return self.data_df - - -__all__ = ['DataListener', 'DataReader'] - -if __name__ == '__main__': - from zvt.domain import Stock1dKdata, Stock - - data_reader = DataReader(codes=['002572', '000338'], data_schema=Stock1dKdata, entity_schema=Stock, - start_timestamp='2017-01-01', - end_timestamp='2019-06-10') - - data_reader.draw(show=True) diff --git a/zvt/contract/register.py b/zvt/contract/register.py deleted file mode 100644 index ceac9222..00000000 --- a/zvt/contract/register.py +++ /dev/null @@ -1,121 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -from typing import List - -import sqlalchemy -from sqlalchemy.ext.declarative import DeclarativeMeta - -from zvt.contract import EntityMixin, zvt_context, Mixin -from zvt.contract.api import get_db_engine, get_db_session_factory -from zvt.utils.utils import add_to_map_list - -logger = logging.getLogger(__name__) - - -def register_entity(entity_type: str = None): - """ - function for register entity type - - :param entity_type: - :type entity_type: - :return: - :rtype: - """ - - def register(cls): - # register the entity - if issubclass(cls, EntityMixin): - entity_type_ = entity_type - if not entity_type: - entity_type_ = cls.__name__.lower() - - if entity_type_ not in zvt_context.entity_types: - zvt_context.entity_types.append(entity_type_) - zvt_context.entity_schemas.append(cls) - zvt_context.entity_schema_map[entity_type_] = cls - return cls - - return register - - -def register_schema(providers: List[str], - db_name: str, - schema_base: DeclarativeMeta, - entity_type: str = None): - """ - function for register schema,please declare them before register - - :param providers: the 
supported providers for the schema - :type providers: - :param db_name: database name for the schema - :type db_name: - :param schema_base: - :type schema_base: - :param entity_type: the schema related entity_type - :type entity_type: - :return: - :rtype: - """ - schemas = [] - for item in schema_base._decl_class_registry.items(): - cls = item[1] - if type(cls) == DeclarativeMeta: - # register provider to the schema - for provider in providers: - if issubclass(cls, Mixin): - cls.register_provider(provider) - - if zvt_context.dbname_map_schemas.get(db_name): - schemas = zvt_context.dbname_map_schemas[db_name] - zvt_context.schemas.append(cls) - if entity_type: - add_to_map_list(the_map=zvt_context.entity_map_schemas, key=entity_type, value=cls) - schemas.append(cls) - - zvt_context.dbname_map_schemas[db_name] = schemas - - for provider in providers: - if provider not in zvt_context.providers: - zvt_context.providers.append(provider) - - if not zvt_context.provider_map_dbnames.get(provider): - zvt_context.provider_map_dbnames[provider] = [] - zvt_context.provider_map_dbnames[provider].append(db_name) - zvt_context.dbname_map_base[db_name] = schema_base - - # create the db & table - engine = get_db_engine(provider, db_name=db_name) - schema_base.metadata.create_all(engine) - - session_fac = get_db_session_factory(provider, db_name=db_name) - session_fac.configure(bind=engine) - - for provider in providers: - engine = get_db_engine(provider, db_name=db_name) - - # create index for 'timestamp','entity_id','code','report_period','updated_timestamp - for table_name, table in iter(schema_base.metadata.tables.items()): - index_list = [] - with engine.connect() as con: - rs = con.execute("PRAGMA INDEX_LIST('{}')".format(table_name)) - for row in rs: - index_list.append(row[1]) - - logger.debug('engine:{},table:{},index:{}'.format(engine, table_name, index_list)) - - for col in ['timestamp', 'entity_id', 'code', 'report_period', 'created_timestamp', 'updated_timestamp']: - if 
col in table.c: - column = eval('table.c.{}'.format(col)) - index_name = '{}_{}_index'.format(table_name, col) - if index_name not in index_list: - index = sqlalchemy.schema.Index(index_name, column) - index.create(engine) - for cols in [('timestamp', 'entity_id'), ('timestamp', 'code')]: - if (cols[0] in table.c) and (col[1] in table.c): - column0 = eval('table.c.{}'.format(col[0])) - column1 = eval('table.c.{}'.format(col[1])) - index_name = '{}_{}_{}_index'.format(table_name, col[0], col[1]) - if index_name not in index_list: - index = sqlalchemy.schema.Index(index_name, column0, - column1) - index.create(engine) diff --git a/zvt/contract/schema.py b/zvt/contract/schema.py deleted file mode 100644 index e0fc1706..00000000 --- a/zvt/contract/schema.py +++ /dev/null @@ -1,371 +0,0 @@ -# -*- coding: utf-8 -*- -import inspect -from datetime import timedelta -from typing import List, Union - -import pandas as pd -from sqlalchemy import Column, String, DateTime, Float, BIGINT -from sqlalchemy.orm import Session - -from zvt.contract import IntervalLevel -from zvt.utils.time_utils import date_and_time, is_same_time, now_pd_timestamp - - -class Mixin(object): - id = Column(String, primary_key=True) - # entity id for this model - entity_id = Column(String) - - # the meaning could be different for different case,most of time it means 'happen time' - timestamp = Column(DateTime) - - # unix epoch,same meaning with timestamp - # ts = Column(BIGINT) - - @classmethod - def help(cls): - print(inspect.getsource(cls)) - - @classmethod - def important_cols(cls): - return [] - - @classmethod - def time_field(cls): - return 'timestamp' - - @classmethod - def register_recorder_cls(cls, provider, recorder_cls): - """ - register the recorder for the schema - - :param provider: - :param recorder_cls: - """ - # don't make provider_map_recorder as class field,it should be created for the sub class as need - if not hasattr(cls, 'provider_map_recorder'): - cls.provider_map_recorder = {} - - 
if provider not in cls.provider_map_recorder: - cls.provider_map_recorder[provider] = recorder_cls - - @classmethod - def register_provider(cls, provider): - # don't make providers as class field,it should be created for the sub class as need - if not hasattr(cls, 'providers'): - cls.providers = [] - - if provider not in cls.providers: - cls.providers.append(provider) - - @classmethod - def test_data_correctness(cls, provider, data_samples): - for data in data_samples: - item = cls.query_data(provider=provider, ids=[data['id']], return_type='dict') - print(item) - for k in data: - if k == 'timestamp': - assert is_same_time(item[0][k], data[k]) - else: - assert item[0][k] == data[k] - - @classmethod - def query_data(cls, - provider_index: int = 0, - ids: List[str] = None, - entity_ids: List[str] = None, - entity_id: str = None, - codes: List[str] = None, - code: str = None, - level: Union[IntervalLevel, str] = None, - provider: str = None, - columns: List = None, - col_label: dict = None, - return_type: str = 'df', - start_timestamp: Union[pd.Timestamp, str] = None, - end_timestamp: Union[pd.Timestamp, str] = None, - filters: List = None, - session: Session = None, - order=None, - limit: int = None, - index: Union[str, list] = None, - time_field: str = 'timestamp'): - from .api import get_data - if not provider: - provider = cls.providers[provider_index] - return get_data(data_schema=cls, ids=ids, entity_ids=entity_ids, entity_id=entity_id, codes=codes, - code=code, level=level, provider=provider, columns=columns, col_label=col_label, - return_type=return_type, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - filters=filters, session=session, order=order, limit=limit, index=index, time_field=time_field) - - @classmethod - def record_data(cls, - provider_index: int = 0, - provider: str = None, - exchanges=None, - entity_ids=None, - codes=None, - batch_size=None, - force_update=None, - sleeping_time=None, - default_size=None, - real_time=None, - 
fix_duplicate_way=None, - start_timestamp=None, - end_timestamp=None, - close_hour=None, - close_minute=None, - one_day_trading_minutes=None, - **kwargs): - if cls.provider_map_recorder: - print(f'{cls.__name__} registered recorders:{cls.provider_map_recorder}') - - if provider: - recorder_class = cls.provider_map_recorder[provider] - else: - recorder_class = cls.provider_map_recorder[cls.providers[provider_index]] - - # get args for specific recorder class - from zvt.contract.recorder import TimeSeriesDataRecorder - if issubclass(recorder_class, TimeSeriesDataRecorder): - args = [item for item in inspect.getfullargspec(cls.record_data).args if - item not in ('cls', 'provider_index', 'provider')] - else: - args = ['batch_size', 'force_update', 'sleeping_time'] - - # just fill the None arg to kw,so we could use the recorder_class default args - kw = {} - for arg in args: - tmp = eval(arg) - if tmp is not None: - kw[arg] = tmp - - # FixedCycleDataRecorder - from zvt.contract.recorder import FixedCycleDataRecorder - if issubclass(recorder_class, FixedCycleDataRecorder): - # contract: - # 1)use FixedCycleDataRecorder to record the data with IntervalLevel - # 2)the table of schema with IntervalLevel format is {entity}_{level}_[adjust_type]_{event} - table: str = cls.__tablename__ - try: - items = table.split('_') - if len(items) == 4: - adjust_type = items[2] - kw['adjust_type'] = adjust_type - level = IntervalLevel(items[1]) - except: - # for other schema not with normal format,but need to calculate size for remaining days - level = IntervalLevel.LEVEL_1DAY - - kw['level'] = level - - # add other custom args - for k in kwargs: - kw[k] = kwargs[k] - - r = recorder_class(**kw) - r.run() - return - else: - r = recorder_class(**kw) - r.run() - return - else: - print(f'no recorders for {cls.__name__}') - - -class NormalMixin(Mixin): - # the record created time in db - created_timestamp = Column(DateTime, default=pd.Timestamp.now()) - # the record updated time in db, some 
recorder would check it for whether need to refresh - updated_timestamp = Column(DateTime) - - -class Player(Mixin): - # 参与者类型 - player_type = Column(String(length=64)) - # 所属国家 - country = Column(String(length=32)) - # 编码 - code = Column(String(length=64)) - # 名字 - name = Column(String(length=128)) - - -class EntityMixin(Mixin): - # 标的类型 - entity_type = Column(String(length=64)) - # 所属交易所 - exchange = Column(String(length=32)) - # 编码 - code = Column(String(length=64)) - # 名字 - name = Column(String(length=128)) - # 上市日 - list_date = Column(DateTime) - # 退市日 - end_date = Column(DateTime) - - @classmethod - def get_trading_dates(cls, start_date=None, end_date=None): - """ - overwrite it to get the trading dates of the entity - - :param start_date: - :param end_date: - :return: - """ - return pd.date_range(start_date, end_date, freq='B') - - @classmethod - def get_trading_intervals(cls): - """ - overwrite it to get the trading intervals of the entity - - :return:[(start,end)] - """ - return [('09:30', '11:30'), ('13:00', '15:00')] - - @classmethod - def get_interval_timestamps(cls, start_date, end_date, level: IntervalLevel): - """ - generate the timestamps for the level - - :param start_date: - :param end_date: - :param level: - """ - - for current_date in cls.get_trading_dates(start_date=start_date, end_date=end_date): - if level == IntervalLevel.LEVEL_1DAY: - yield current_date - elif level == IntervalLevel.LEVEL_1WEEK: - if current_date.weekday() == 4: - yield current_date - else: - start_end_list = cls.get_trading_intervals() - - for start_end in start_end_list: - start = start_end[0] - end = start_end[1] - - current_timestamp = date_and_time(the_date=current_date, the_time=start) - end_timestamp = date_and_time(the_date=current_date, the_time=end) - - while current_timestamp <= end_timestamp: - yield current_timestamp - current_timestamp = current_timestamp + timedelta(minutes=level.to_minute()) - - @classmethod - def is_open_timestamp(cls, timestamp): - 
timestamp = pd.Timestamp(timestamp) - return is_same_time(timestamp, date_and_time(the_date=timestamp.date(), - the_time=cls.get_trading_intervals()[0][0])) - - @classmethod - def is_close_timestamp(cls, timestamp): - timestamp = pd.Timestamp(timestamp) - return is_same_time(timestamp, date_and_time(the_date=timestamp.date(), - the_time=cls.get_trading_intervals()[-1][1])) - - @classmethod - def is_finished_kdata_timestamp(cls, timestamp: pd.Timestamp, level: IntervalLevel): - """ - :param timestamp: the timestamp could be recorded in kdata of the level - :type timestamp: pd.Timestamp - :param level: - :type level: zvt.domain.common.IntervalLevel - :return: - :rtype: bool - """ - timestamp = pd.Timestamp(timestamp) - - for t in cls.get_interval_timestamps(timestamp.date(), timestamp.date(), level=level): - if is_same_time(t, timestamp): - return True - - return False - - @classmethod - def could_short(cls): - """ - whether could be shorted - - :return: - """ - return False - - @classmethod - def get_trading_t(cls): - """ - 0 means t+0 - 1 means t+1 - - :return: - """ - return 1 - - -class NormalEntityMixin(EntityMixin): - # the record created time in db - created_timestamp = Column(DateTime, default=pd.Timestamp.now()) - # the record updated time in db, some recorder would check it for whether need to refresh - updated_timestamp = Column(DateTime) - - -class Portfolio(EntityMixin): - @classmethod - def get_stocks(cls, - code=None, codes=None, ids=None, timestamp=now_pd_timestamp(), provider=None): - """ - the publishing policy of portfolio positions is different for different types, - overwrite this function for get the holding stocks in specific date - - :param code: portfolio(etf/block/index...) code - :param codes: portfolio(etf/block/index...) codes - :param ids: portfolio(etf/block/index...) 
ids - :param timestamp: the date of the holding stocks - :param provider: the data provider - :return: - """ - from zvt.contract.api import get_schema_by_name - schema_str = f'{cls.__name__}Stock' - portfolio_stock = get_schema_by_name(schema_str) - return portfolio_stock.query_data(provider=provider, code=code, codes=codes, timestamp=timestamp, ids=ids) - - -# 组合(Fund,Etf,Index,Block等)和个股(Stock)的关系 应该继承自该类 -# 该基础类可以这样理解: -# entity为组合本身,其包含了stock这种entity,timestamp为持仓日期,从py的"你知道你在干啥"的哲学出发,不加任何约束 -class PortfolioStock(Mixin): - # portfolio标的类型 - entity_type = Column(String(length=64)) - # portfolio所属交易所 - exchange = Column(String(length=32)) - # portfolio编码 - code = Column(String(length=64)) - # portfolio名字 - name = Column(String(length=128)) - - stock_id = Column(String) - stock_code = Column(String(length=64)) - stock_name = Column(String(length=128)) - - -# 支持时间变化,报告期标的调整 -class PortfolioStockHistory(PortfolioStock): - # 报告期,season1,half_year,season3,year - report_period = Column(String(length=32)) - # 3-31,6-30,9-30,12-31 - report_date = Column(DateTime) - - # 占净值比例 - proportion = Column(Float) - # 持有股票的数量 - shares = Column(Float) - # 持有股票的市值 - market_cap = Column(Float) - - -__all__ = ['EntityMixin', 'Mixin', 'NormalMixin', 'NormalEntityMixin', 'Portfolio', 'PortfolioStock', - 'PortfolioStockHistory'] diff --git a/zvt/contract/zvt_context.py b/zvt/contract/zvt_context.py deleted file mode 100644 index 4ea1b47d..00000000 --- a/zvt/contract/zvt_context.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- - -# all registered providers -providers = [] - -# all registered entity types -entity_types = [] - -# all entity schemas -entity_schemas = [] - -# all registered schemas -schemas = [] - -# entity_type -> entity schema -entity_schema_map = {} - -# global sessions -sessions = {} - -# provider_dbname -> engine -db_engine_map = {} - -# provider_dbname -> session -db_session_map = {} - -# provider -> [db_name1,db_name2...] 
-provider_map_dbnames = {} - -# db_name -> [declarative_base1,declarative_base2...] -dbname_map_base = {} - -# db_name -> [declarative_meta1,declarative_meta2...] -dbname_map_schemas = {} - -# entity_type -> related schemas -entity_map_schemas = {} - -# factor class registry -factor_cls_registry = {} diff --git a/zvt/domain/__init__.py b/zvt/domain/__init__.py deleted file mode 100644 index 0af14350..00000000 --- a/zvt/domain/__init__.py +++ /dev/null @@ -1,88 +0,0 @@ -# -*- coding: utf-8 -*- -import enum - - -class BlockCategory(enum.Enum): - # 行业版块 - industry = 'industry' - # 概念版块 - concept = 'concept' - # 区域版块 - area = 'area' - # 上证指数 - sse = 'sse' - # 深圳指数 - szse = 'szse' - # 中证指数 - csi = 'csi' - # 国证指数 - cni = 'cni' - # ETF - etf = 'etf' - - -class ReportPeriod(enum.Enum): - # 有些基金的2,4季报只有10大持仓,半年报和年报有详细持仓,需要区别对待 - season1 = 'season1' - season2 = 'season2' - season3 = 'season3' - season4 = 'season4' - half_year = 'half_year' - year = 'year' - - -class InstitutionalInvestor(enum.Enum): - # 基金 - fund = 'fund' - # 社保 - social_security = 'social_security' - # 保险 - insurance = 'insurance' - # 外资 - qfii = 'qfii' - # 信托 - trust = 'trust' - # 券商 - broker = 'broker' - # 公司 - other = 'other' - - -# 用于区分不同的财务指标 -class CompanyType(enum.Enum): - qiye = 'qiye' - baoxian = 'baoxian' - yinhang = 'yinhang' - quanshang = 'quanshang' - -# the __all__ is generated -__all__ = ['BlockCategory', 'ReportPeriod', 'InstitutionalInvestor', 'CompanyType'] - -# __init__.py structure: -# common code of the package -# export interface in __all__ which contains __all__ of its sub modules - -# import all from submodule misc -from .misc import * -from .misc import __all__ as _misc_all -__all__ += _misc_all - -# import all from submodule quotes -from .quotes import * -from .quotes import __all__ as _quotes_all -__all__ += _quotes_all - -# import all from submodule meta -from .meta import * -from .meta import __all__ as _meta_all -__all__ += _meta_all - -# import all from submodule fundamental 
-from .fundamental import * -from .fundamental import __all__ as _fundamental_all -__all__ += _fundamental_all - -# import all from submodule trader_info -from .trader_info import * -from .trader_info import __all__ as _trader_info_all -__all__ += _trader_info_all \ No newline at end of file diff --git a/zvt/domain/fundamental/trading.py b/zvt/domain/fundamental/trading.py deleted file mode 100644 index c277b5d9..00000000 --- a/zvt/domain/fundamental/trading.py +++ /dev/null @@ -1,162 +0,0 @@ -# -*- coding: utf-8 -*- -from sqlalchemy import Column, String, Float -from sqlalchemy.ext.declarative import declarative_base - -from zvt.contract import Mixin -from zvt.contract.register import register_schema - -TradingBase = declarative_base() - - -class ManagerTrading(TradingBase, Mixin): - __tablename__ = 'manager_trading' - - provider = Column(String(length=32)) - code = Column(String(length=32)) - # 日期 变动人 变动数量(股) 交易均价(元) 结存股票(股) 交易方式 董监高管 高管职位 与高管关系 - # 2017-08-11 韦春 200 9.16 -- 竞价交易 刘韬 高管 兄弟姐妹 - - # 变动人 - trading_person = Column(String(length=32)) - # 变动数量 - volume = Column(Float) - # 交易均价 - price = Column(Float) - # 结存股票 - holding = Column(Float) - # 交易方式 - trading_way = Column(String(length=32)) - # 董监高管 - manager = Column(String(length=32)) - # 高管职位 - manager_position = Column(String(length=32)) - # 与高管关系 - relationship_with_manager = Column(String(length=32)) - - -class HolderTrading(TradingBase, Mixin): - __tablename__ = 'holder_trading' - - provider = Column(String(length=32)) - code = Column(String(length=32)) - - # 股东名称 - holder_name = Column(String(length=32)) - # 变动数量 - volume = Column(Float) - # 变动比例 - change_pct = Column(Float) - # 变动后持股比例 - holding_pct = Column(Float) - - -class BigDealTrading(TradingBase, Mixin): - __tablename__ = 'big_deal_trading' - - provider = Column(String(length=32)) - code = Column(String(length=32)) - - # 成交额 - turnover = Column(Float) - # 成交价 - price = Column(Float) - # 卖出营业部 - sell_broker = Column(String(length=128)) - # 
买入营业部 - buy_broker = Column(String(length=128)) - # 折/溢价率 - compare_rate = Column(Float) - - -class MarginTrading(TradingBase, Mixin): - __tablename__ = 'margin_trading' - code = Column(String(length=32)) - - # 融资余额(元) - fin_value = Column(Float) - # 融资买入额(元) - fin_buy_value = Column(Float) - # 融资偿还额(元) - fin_refund_value = Column(Float) - # 融券余量(股) - sec_value = Column(Float) - # 融券卖出量(股) - sec_sell_value = Column(Float) - # 融券偿还量(股) - sec_refund_value = Column(Float) - # 融资融券余额(元) - fin_sec_value = Column(Float) - - -class DragonAndTiger(TradingBase, Mixin): - __tablename__ = 'dragon_and_tiger' - - provider = Column(String(length=32)) - code = Column(String(length=32)) - - # 异动原因 - reason = Column(String(length=128)) - # 成交额 - turnover = Column(Float) - # 涨幅 - change_pct = Column(Float) - # 净买入 - net_inflow = Column(Float) - - # 买入营业部 - net_in_dep1 = Column(String(length=128)) - net_in_dep1_money_in = Column(Float) - net_in_dep1_money_out = Column(Float) - net_in_dep1_rate = Column(Float) - - net_in_dep2 = Column(String(length=128)) - net_in_dep2_money_in = Column(Float) - net_in_dep2_money_out = Column(Float) - net_in_dep2_rate = Column(Float) - - net_in_dep3 = Column(String(length=128)) - net_in_dep3_money_in = Column(Float) - net_in_dep3_money_out = Column(Float) - net_in_dep3_rate = Column(Float) - - net_in_dep4 = Column(String(length=128)) - net_in_dep4_money_in = Column(Float) - net_in_dep4_money_out = Column(Float) - net_in_dep4_rate = Column(Float) - - net_in_dep5 = Column(String(length=128)) - net_in_dep5_money_in = Column(Float) - net_in_dep5_money_out = Column(Float) - net_in_dep5_rate = Column(Float) - - # 卖出营业部 - net_out_dep1 = Column(String(length=128)) - net_out_dep1_money_in = Column(Float) - net_out_dep1_money_out = Column(Float) - net_out_dep1_rate = Column(Float) - - net_out_dep2 = Column(String(length=128)) - net_out_dep2_money_in = Column(Float) - net_out_dep2_money_out = Column(Float) - net_out_dep2_rate = Column(Float) - - net_out_dep3 = 
Column(String(length=128)) - net_out_dep3_money_in = Column(Float) - net_out_dep3_money_out = Column(Float) - net_out_dep3_rate = Column(Float) - - net_out_dep4 = Column(String(length=128)) - net_out_dep4_money_in = Column(Float) - net_out_dep4_money_out = Column(Float) - net_out_dep4_rate = Column(Float) - - net_out_dep5 = Column(String(length=128)) - net_out_dep5_money_in = Column(Float) - net_out_dep5_money_out = Column(Float) - net_out_dep5_rate = Column(Float) - - -register_schema(providers=['eastmoney', 'joinquant'], db_name='trading', schema_base=TradingBase, entity_type='stock') - -# the __all__ is generated -__all__ = ['ManagerTrading', 'HolderTrading', 'BigDealTrading', 'MarginTrading', 'DragonAndTiger'] \ No newline at end of file diff --git a/zvt/domain/meta/__init__.py b/zvt/domain/meta/__init__.py deleted file mode 100644 index 0566f4f3..00000000 --- a/zvt/domain/meta/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- coding: utf-8 -*- -# the __all__ is generated -__all__ = [] - -# __init__.py structure: -# common code of the package -# export interface in __all__ which contains __all__ of its sub modules - -# import all from submodule stock_meta -from .stock_meta import * -from .stock_meta import __all__ as _stock_meta_all -__all__ += _stock_meta_all - -# import all from submodule fund_meta -from .fund_meta import * -from .fund_meta import __all__ as _fund_meta_all -__all__ += _fund_meta_all \ No newline at end of file diff --git a/zvt/domain/meta/fund_meta.py b/zvt/domain/meta/fund_meta.py deleted file mode 100644 index dbce8e7e..00000000 --- a/zvt/domain/meta/fund_meta.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -from sqlalchemy import Column, String, Integer -from sqlalchemy.ext.declarative import declarative_base - -from zvt.contract import Portfolio, PortfolioStockHistory -from zvt.contract.register import register_entity, register_schema -from zvt.utils import now_pd_timestamp - -FundMetaBase = declarative_base() - - -# 个股 
-@register_entity(entity_type='fund') -class Fund(FundMetaBase, Portfolio): - __tablename__ = 'fund' - # 基金管理人 - advisor = Column(String(length=100)) - # 基金托管人 - trustee = Column(String(length=100)) - - # 编码 基金运作方式 - # 401001 开放式基金 - # 401002 封闭式基金 - # 401003 QDII - # 401004 FOF - # 401005 ETF - # 401006 LOF - # 基金运作方式编码 - operate_mode_id = Column(Integer) - # 基金运作方式 - operate_mode = Column(String(length=32)) - - # 编码 基金类别 - # 402001 股票型 - # 402002 货币型 - # 402003 债券型 - # 402004 混合型 - # 402005 基金型 - # 402006 贵金属 - # 402007 封闭式 - # 投资标的类型编码 - underlying_asset_type_id = Column(Integer) - # 投资标的类型 - underlying_asset_type = Column(String(length=32)) - - @classmethod - def get_stocks(cls, code=None, codes=None, ids=None, timestamp=now_pd_timestamp(), provider=None): - from zvt.api.quote import get_fund_stocks - return get_fund_stocks(code=code, codes=codes, ids=ids, timestamp=timestamp, provider=provider) - - -class FundStock(FundMetaBase, PortfolioStockHistory): - __tablename__ = 'fund_stock' - - -register_schema(providers=['joinquant'], db_name='fund_meta', schema_base=FundMetaBase) -# the __all__ is generated -__all__ = ['Fund', 'FundStock'] diff --git a/zvt/domain/meta/stock_meta.py b/zvt/domain/meta/stock_meta.py deleted file mode 100644 index ed933d60..00000000 --- a/zvt/domain/meta/stock_meta.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- - -from sqlalchemy import Column, String, DateTime, BigInteger, Float -from sqlalchemy.ext.declarative import declarative_base - -from zvt.contract import EntityMixin -from zvt.contract.register import register_schema, register_entity -from zvt.contract import Portfolio, PortfolioStock, PortfolioStockHistory -from zvt.utils.time_utils import now_pd_timestamp - -StockMetaBase = declarative_base() - - -# 个股 -@register_entity(entity_type='stock') -class Stock(StockMetaBase, EntityMixin): - __tablename__ = 'stock' - - -# 板块 -@register_entity(entity_type='block') -class Block(StockMetaBase, Portfolio): - __tablename__ = 
'block' - - # 板块类型,行业(industry),概念(concept) - category = Column(String(length=64)) - - -# 指数 -@register_entity(entity_type='index') -class Index(StockMetaBase, Portfolio): - __tablename__ = 'index' - - # 发布商 - publisher = Column(String(length=64)) - # 类别 - category = Column(String(length=64)) - # 基准点数 - base_point = Column(Float) - - -# etf -@register_entity(entity_type='etf') -class Etf(StockMetaBase, Portfolio): - __tablename__ = 'etf' - category = Column(String(length=64)) - - @classmethod - def get_stocks(cls, code=None, codes=None, ids=None, timestamp=now_pd_timestamp(), provider=None): - from zvt.api.quote import get_etf_stocks - return get_etf_stocks(code=code, codes=codes, ids=ids, timestamp=timestamp, provider=provider) - - -class BlockStock(StockMetaBase, PortfolioStock): - __tablename__ = 'block_stock' - - -class IndexStock(StockMetaBase, PortfolioStockHistory): - __tablename__ = 'index_stock' - - -class EtfStock(StockMetaBase, PortfolioStockHistory): - __tablename__ = 'etf_stock' - - -# 个股详情 -@register_entity(entity_type='stock_detail') -class StockDetail(StockMetaBase, EntityMixin): - __tablename__ = 'stock_detail' - - industries = Column(String) - industry_indices = Column(String) - concept_indices = Column(String) - area_indices = Column(String) - - # 成立日期 - date_of_establishment = Column(DateTime) - # 公司简介 - profile = Column(String(length=1024)) - # 主营业务 - main_business = Column(String(length=512)) - # 发行量(股) - issues = Column(BigInteger) - # 发行价格 - price = Column(Float) - # 募资净额(元) - raising_fund = Column(Float) - # 发行市盈率 - issue_pe = Column(Float) - # 网上中签率 - net_winning_rate = Column(Float) - - -register_schema(providers=['joinquant', 'eastmoney', 'exchange', 'sina'], db_name='stock_meta', - schema_base=StockMetaBase) - -# the __all__ is generated -__all__ = ['Stock', 'Block', 'Index', 'Etf', 'BlockStock', 'IndexStock', 'EtfStock', 'StockDetail'] \ No newline at end of file diff --git a/zvt/domain/quotes/__init__.py 
b/zvt/domain/quotes/__init__.py deleted file mode 100644 index 21fd15f5..00000000 --- a/zvt/domain/quotes/__init__.py +++ /dev/null @@ -1,102 +0,0 @@ -# -*- coding: utf-8 -*- -from sqlalchemy import String, Column, Float - -from zvt.contract import Mixin - - -class KdataCommon(Mixin): - provider = Column(String(length=32)) - code = Column(String(length=32)) - name = Column(String(length=32)) - # Enum constraint is not extendable - # level = Column(Enum(IntervalLevel, values_callable=enum_value)) - level = Column(String(length=32)) - - # 开盘价 - open = Column(Float) - # 收盘价 - close = Column(Float) - # 最高价 - high = Column(Float) - # 最低价 - low = Column(Float) - # 成交量 - volume = Column(Float) - # 成交金额 - turnover = Column(Float) - - -class TickCommon(Mixin): - provider = Column(String(length=32)) - code = Column(String(length=32)) - name = Column(String(length=32)) - level = Column(String(length=32)) - - order = Column(String(length=32)) - price = Column(Float) - volume = Column(Float) - turnover = Column(Float) - direction = Column(String(length=32)) - order_type = Column(String(length=32)) - - -class BlockKdataCommon(KdataCommon): - pass - - -class IndexKdataCommon(KdataCommon): - pass - - -class EtfKdataCommon(KdataCommon): - turnover_rate = Column(Float) - - # ETF 累计净值(货币 ETF 为七日年化) - cumulative_net_value = Column(Float) - # ETF 净值增长率 - change_pct = Column(Float) - - -class StockKdataCommon(KdataCommon): - # 涨跌幅 - change_pct = Column(Float) - # 换手率 - turnover_rate = Column(Float) - - -# the __all__ is generated -__all__ = ['KdataCommon', 'TickCommon', 'BlockKdataCommon', 'IndexKdataCommon', 'EtfKdataCommon', 'StockKdataCommon'] - -# __init__.py structure: -# common code of the package -# export interface in __all__ which contains __all__ of its sub modules - -# import all from submodule trade_day -from .trade_day import * -from .trade_day import __all__ as _trade_day_all -__all__ += _trade_day_all - -# import all from submodule common -from .common import * -from 
.common import __all__ as _common_all -__all__ += _common_all - -# import all from submodule index -from .index import * -from .index import __all__ as _index_all -__all__ += _index_all - -# import all from submodule etf -from .etf import * -from .etf import __all__ as _etf_all -__all__ += _etf_all - -# import all from submodule stock -from .stock import * -from .stock import __all__ as _stock_all -__all__ += _stock_all - -# import all from submodule block -from .block import * -from .block import __all__ as _block_all -__all__ += _block_all \ No newline at end of file diff --git a/zvt/domain/quotes/common.py b/zvt/domain/quotes/common.py deleted file mode 100644 index 7ca22fae..00000000 --- a/zvt/domain/quotes/common.py +++ /dev/null @@ -1,70 +0,0 @@ -# -*- coding: utf-8 -*- -from sqlalchemy import String, Column, Float - -from zvt.contract import Mixin - - -class KdataCommon(Mixin): - provider = Column(String(length=32)) - code = Column(String(length=32)) - name = Column(String(length=32)) - # Enum constraint is not extendable - # level = Column(Enum(IntervalLevel, values_callable=enum_value)) - level = Column(String(length=32)) - - # 如果是股票,代表前复权数据 - # 开盘价 - open = Column(Float) - # 收盘价 - close = Column(Float) - # 最高价 - high = Column(Float) - # 最低价 - low = Column(Float) - # 成交量 - volume = Column(Float) - # 成交金额 - turnover = Column(Float) - - -class TickCommon(Mixin): - provider = Column(String(length=32)) - code = Column(String(length=32)) - name = Column(String(length=32)) - level = Column(String(length=32)) - - order = Column(String(length=32)) - price = Column(Float) - volume = Column(Float) - turnover = Column(Float) - direction = Column(String(length=32)) - order_type = Column(String(length=32)) - - -class BlockKdataCommon(KdataCommon): - pass - - -class IndexKdataCommon(KdataCommon): - pass - - -class EtfKdataCommon(KdataCommon): - turnover_rate = Column(Float) - - # ETF 累计净值(货币 ETF 为七日年化) - cumulative_net_value = Column(Float) - # ETF 净值增长率 - change_pct = 
Column(Float) - - -class StockKdataCommon(KdataCommon): - # 涨跌幅 - change_pct = Column(Float) - # 换手率 - turnover_rate = Column(Float) - - -__all__ = ['KdataCommon', 'TickCommon', 'BlockKdataCommon', 'IndexKdataCommon', 'EtfKdataCommon', 'StockKdataCommon'] -# the __all__ is generated -__all__ = ['KdataCommon', 'TickCommon', 'BlockKdataCommon', 'IndexKdataCommon', 'EtfKdataCommon', 'StockKdataCommon'] \ No newline at end of file diff --git a/zvt/factors/fundamental/finance_factor.py b/zvt/factors/fundamental/finance_factor.py deleted file mode 100644 index 244d8bc3..00000000 --- a/zvt/factors/fundamental/finance_factor.py +++ /dev/null @@ -1,158 +0,0 @@ -# -*- coding: utf-8 -*- -import operator -from itertools import accumulate -from typing import List, Union, Type - -import pandas as pd - -from zvt.contract import IntervalLevel, Mixin, EntityMixin -from zvt.contract.factor import Factor, Transformer, Accumulator, FilterFactor -from zvt.domain import FinanceFactor, BalanceSheet, Stock - - -class FinanceBaseFactor(Factor): - def __init__(self, - data_schema: Type[Mixin] = FinanceFactor, - entity_schema: Type[EntityMixin] = Stock, - provider: str = None, - entity_provider: str = None, - entity_ids: List[str] = None, - exchanges: List[str] = None, - codes: List[str] = None, - the_timestamp: Union[str, pd.Timestamp] = None, - start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = None, - columns: List = None, - filters: List = None, - order: object = None, - limit: int = None, - level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, - category_field: str = 'entity_id', - time_field: str = 'timestamp', - computing_window: int = None, - keep_all_timestamp: bool = False, - fill_method: str = 'ffill', - effective_number: int = None, - transformer: Transformer = None, - accumulator: Accumulator = None, - need_persist: bool = False, - dry_run: bool = False, - factor_name: str = None, - clear_state: bool = False, - not_load_data: 
bool = False) -> None: - if not columns: - columns = data_schema.important_cols() - - super().__init__(data_schema, entity_schema, provider, entity_provider, entity_ids, exchanges, codes, - the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, level, - category_field, time_field, computing_window, keep_all_timestamp, fill_method, - effective_number, transformer, accumulator, need_persist, dry_run, factor_name, clear_state, - not_load_data) - - -class GoodCompanyFactor(FinanceBaseFactor, FilterFactor): - def __init__(self, data_schema: Type[Mixin] = FinanceFactor, entity_schema: EntityMixin = Stock, - provider: str = None, - entity_provider: str = None, entity_ids: List[str] = None, exchanges: List[str] = None, - codes: List[str] = None, the_timestamp: Union[str, pd.Timestamp] = None, - start_timestamp: Union[str, pd.Timestamp] = None, end_timestamp: Union[str, pd.Timestamp] = None, - # 高roe,高现金流,低财务杠杆,有增长 - columns: List = (FinanceFactor.roe, - FinanceFactor.op_income_growth_yoy, - FinanceFactor.net_profit_growth_yoy, - FinanceFactor.report_period, - FinanceFactor.op_net_cash_flow_per_op_income, - FinanceFactor.sales_net_cash_flow_per_op_income, - FinanceFactor.current_ratio, - FinanceFactor.debt_asset_ratio), - filters: List = (FinanceFactor.roe >= 0.02, - FinanceFactor.op_income_growth_yoy >= 0.05, - FinanceFactor.net_profit_growth_yoy >= 0.05, - FinanceFactor.op_net_cash_flow_per_op_income >= 0.1, - FinanceFactor.sales_net_cash_flow_per_op_income >= 0.3, - FinanceFactor.current_ratio >= 1, - FinanceFactor.debt_asset_ratio <= 0.5), - order: object = None, limit: int = None, - level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', - time_field: str = 'timestamp', computing_window: int = None, keep_all_timestamp: bool = True, - fill_method: str = 'ffill', effective_number: int = None, transformer: Transformer = None, - accumulator: Accumulator = None, need_persist: bool = False, dry_run: bool = 
False, - factor_name: str = None, clear_state: bool = False, not_load_data: bool = False, - # 3 years - window='1095d', - count=8, - col_period_threshold={'roe': 0.02}) -> None: - self.window = window - self.count = count - - # 对于根据年度计算才有意义的指标,比如roe,我们会对不同季度的值区别处理,传入的参数为季度值 - self.col_period_threshold = col_period_threshold - if self.col_period_threshold: - if 'report_period' not in columns and (data_schema.report_period not in columns): - columns.append(data_schema.report_period) - - self.logger.info(f'using data_schema:{data_schema.__name__}') - - super().__init__(data_schema, entity_schema, provider, entity_provider, entity_ids, exchanges, codes, - the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, level, - category_field, time_field, computing_window, keep_all_timestamp, fill_method, - effective_number, transformer, accumulator, need_persist, dry_run, factor_name, clear_state, - not_load_data) - - def compute_factor(self): - def filter_df(df): - se = pd.Series(index=df.index) - for index, row in df.iterrows(): - - if row.report_period == 'year': - mul = 4 - elif row.report_period == 'season3': - mul = 3 - elif row.report_period == 'half_year': - mul = 2 - else: - mul = 1 - - filters = [] - for col in self.col_period_threshold: - col_se = eval(f'row.{col}') - filters.append(col_se >= mul * self.col_period_threshold[col]) - se[index] = list(accumulate(filters, func=operator.__and__))[-1] - - return se - - if self.col_period_threshold: - self.factor_df = self.data_df.loc[lambda df: filter_df(df), :] - - self.factor_df = pd.DataFrame(index=self.data_df.index, columns=['count'], data=1) - - self.factor_df = self.factor_df.reset_index(level=1) - - self.factor_df = self.factor_df.groupby(level=0).rolling(window=self.window, on=self.time_field).count() - - self.factor_df = self.factor_df.reset_index(level=0, drop=True) - self.factor_df = self.factor_df.set_index(self.time_field, append=True) - - self.factor_df = 
self.factor_df.loc[(slice(None), slice(self.start_timestamp, self.end_timestamp)), :] - - self.logger.info('factor:{},factor_df:\n{}'.format(self.factor_name, self.factor_df)) - - def compute_result(self): - self.result_df = self.factor_df.apply(lambda x: x >= self.count) - - self.logger.info('factor:{},result_df:\n{}'.format(self.factor_name, self.result_df)) - - -if __name__ == '__main__': - # f1 = GoodCompanyFactor(keep_all_timestamp=False) - # print(f1.result_df) - - # 高股息 低应收 - factor2 = GoodCompanyFactor(data_schema=BalanceSheet, - columns=[BalanceSheet.accounts_receivable], - filters=[ - BalanceSheet.accounts_receivable <= 0.2 * BalanceSheet.total_current_assets], - col_period_threshold=None, keep_all_timestamp=False) - print(factor2.result_df) -# the __all__ is generated -__all__ = ['FinanceBaseFactor', 'GoodCompanyFactor'] diff --git a/zvt/factors/ma/ma_factor.py b/zvt/factors/ma/ma_factor.py deleted file mode 100644 index 0253b55b..00000000 --- a/zvt/factors/ma/ma_factor.py +++ /dev/null @@ -1,147 +0,0 @@ -# -*- coding: utf-8 -*- -import argparse -from typing import List, Union, Type, Optional - -import pandas as pd - -from zvt.contract import IntervalLevel, EntityMixin, AdjustType -from zvt.contract.api import get_entities, get_schema_by_name -from zvt.contract.factor import Accumulator -from zvt.contract.factor import Transformer -from zvt.domain import Stock -from zvt.factors.algorithm import MaTransformer, MaAndVolumeTransformer -from zvt.factors.technical_factor import TechnicalFactor -from zvt.utils.time_utils import now_pd_timestamp - - -def get_ma_factor_schema(entity_type: str, - level: Union[IntervalLevel, str] = IntervalLevel.LEVEL_1DAY): - if type(level) == str: - level = IntervalLevel(level) - - schema_str = '{}{}MaFactor'.format(entity_type.capitalize(), level.value.capitalize()) - - return get_schema_by_name(schema_str) - - -class MaFactor(TechnicalFactor): - def __init__(self, entity_schema: Type[EntityMixin] = Stock, provider: str = None, 
entity_provider: str = None, - entity_ids: List[str] = None, exchanges: List[str] = None, codes: List[str] = None, - the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = None, columns: List = None, filters: List = None, - order: object = None, limit: int = None, level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, - category_field: str = 'entity_id', time_field: str = 'timestamp', computing_window: int = None, - keep_all_timestamp: bool = False, fill_method: str = 'ffill', effective_number: int = None, - need_persist: bool = False, - dry_run: bool = False, factor_name: str = None, clear_state: bool = False, not_load_data: bool = False, - adjust_type: Union[AdjustType, str] = None, windows=None) -> None: - if need_persist: - self.factor_schema = get_ma_factor_schema(entity_type=entity_schema.__name__, level=level) - - if not windows: - windows = [5, 10, 34, 55, 89, 144, 120, 250] - self.windows = windows - transformer: Transformer = MaTransformer(windows=windows) - - super().__init__(entity_schema, provider, entity_provider, entity_ids, exchanges, codes, the_timestamp, - start_timestamp, end_timestamp, columns, filters, order, limit, level, category_field, - time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer, - None, need_persist, dry_run, factor_name, clear_state, not_load_data, adjust_type) - - def drawer_factor_df_list(self) -> Optional[List[pd.DataFrame]]: - return [self.factor_df[self.transformer.indicators]] - - -class CrossMaFactor(MaFactor): - def compute_result(self): - super().compute_result() - cols = [f'ma{window}' for window in self.windows] - s = self.factor_df[cols[0]] > self.factor_df[cols[1]] - current_col = cols[1] - for col in cols[2:]: - s = s & (self.factor_df[current_col] > self.factor_df[col]) - current_col = col - - print(self.factor_df[s]) - self.result_df = s.to_frame(name='score') - - -class 
VolumeUpMaFactor(TechnicalFactor): - - def __init__(self, entity_schema: Type[EntityMixin] = Stock, provider: str = None, entity_provider: str = None, - entity_ids: List[str] = None, exchanges: List[str] = None, codes: List[str] = None, - the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = None, filters: List = None, - order: object = None, limit: int = None, level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, - category_field: str = 'entity_id', time_field: str = 'timestamp', computing_window: int = None, - keep_all_timestamp: bool = False, fill_method: str = 'ffill', effective_number: int = None, - accumulator: Accumulator = None, need_persist: bool = False, - dry_run: bool = False, factor_name: str = None, clear_state: bool = False, not_load_data: bool = False, - adjust_type: Union[AdjustType, str] = None, windows=None, vol_windows=None) -> None: - if not windows: - windows = [250] - if not vol_windows: - vol_windows = [30] - - self.windows = windows - self.vol_windows = vol_windows - - columns: List = ['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low', 'volume', - 'turnover'] - - transformer: Transformer = MaAndVolumeTransformer(windows=windows, vol_windows=vol_windows) - - super().__init__(entity_schema, provider, entity_provider, entity_ids, exchanges, codes, the_timestamp, - start_timestamp, end_timestamp, columns, filters, order, limit, level, category_field, - time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer, - accumulator, need_persist, dry_run, factor_name, clear_state, not_load_data, adjust_type) - - def compute_result(self): - super().compute_result() - - # 价格刚上均线 - cols = [f'ma{window}' for window in self.windows] - filter_se = (self.factor_df['close'] > self.factor_df[cols[0]]) & ( - self.factor_df['close'] < 1.1 * self.factor_df[cols[0]]) - for col in cols[1:]: - filter_se = filter_se & 
(self.factor_df['close'] > self.factor_df[col]) - - # 放量 - if self.vol_windows: - vol_cols = [f'vol_ma{window}' for window in self.vol_windows] - filter_se = filter_se & (self.factor_df['volume'] > 2 * self.factor_df[vol_cols[0]]) - for col in vol_cols[1:]: - filter_se = filter_se & (self.factor_df['volume'] > 2 * self.factor_df[col]) - - # 成交额大于1亿️ - filter_se = filter_se & (self.factor_df['turnover'] > 100000000) - - print(self.factor_df[filter_se]) - self.result_df = filter_se.to_frame(name='score') - - -if __name__ == '__main__': - print('start') - parser = argparse.ArgumentParser() - parser.add_argument('--level', help='trading level', default='1d', - choices=[item.value for item in IntervalLevel]) - parser.add_argument('--start', help='start code', default='000001') - parser.add_argument('--end', help='end code', default='000005') - - args = parser.parse_args() - - level = IntervalLevel(args.level) - start = args.start - end = args.end - - entities = get_entities(provider='eastmoney', entity_type='stock', columns=[Stock.entity_id, Stock.code], - filters=[Stock.code >= start, Stock.code < end]) - - codes = entities.index.to_list() - - factor = VolumeUpMaFactor(entity_ids=['stock_sz_000338'], start_timestamp='2020-01-01', - end_timestamp=now_pd_timestamp(), need_persist=False, - level=level) - print(factor.result_df) -# the __all__ is generated -__all__ = ['get_ma_factor_schema', 'MaFactor', 'CrossMaFactor', 'VolumeUpMaFactor'] diff --git a/zvt/factors/ma/top_bottom_factor.py b/zvt/factors/ma/top_bottom_factor.py deleted file mode 100644 index 2df41386..00000000 --- a/zvt/factors/ma/top_bottom_factor.py +++ /dev/null @@ -1,68 +0,0 @@ -# -*- coding: utf-8 -*- -from typing import List, Union - -import pandas as pd - -from zvt.contract import AdjustType -from zvt.contract import IntervalLevel, EntityMixin -from zvt.contract.drawer import Drawer -from zvt.contract.factor import Accumulator -from zvt.contract.factor import Transformer -from zvt.contract.reader 
import DataReader -from zvt.domain import Stock, Stock1dKdata -from zvt.factors.technical_factor import TechnicalFactor -from zvt.utils.time_utils import now_pd_timestamp - - -class TopBottomTransformer(Transformer): - def __init__(self, window=20) -> None: - super().__init__() - self.window = window - - def transform(self, input_df) -> pd.DataFrame: - top_df = input_df['high'].groupby(level=0).rolling(window=self.window, min_periods=self.window).max() - top_df = top_df.reset_index(level=0, drop=True) - input_df['top'] = top_df - - bottom_df = input_df['high'].groupby(level=0).rolling(window=self.window, min_periods=self.window).min() - bottom_df = bottom_df.reset_index(level=0, drop=True) - input_df['bottom'] = bottom_df - - return input_df - - -class TopBottomFactor(TechnicalFactor): - def __init__(self, entity_schema: EntityMixin = Stock, provider: str = None, entity_provider: str = None, - entity_ids: List[str] = None, exchanges: List[str] = None, codes: List[str] = None, - the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = None, - columns: List = ['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low'], - filters: List = None, order: object = None, limit: int = None, - level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', - time_field: str = 'timestamp', computing_window: int = None, keep_all_timestamp: bool = False, - fill_method: str = 'ffill', effective_number: int = None, - accumulator: Accumulator = None, need_persist: bool = False, dry_run: bool = False, - factor_name: str = None, clear_state: bool = False, not_load_data: bool = False, - adjust_type: Union[AdjustType, str] = None, window=30) -> None: - self.adjust_type = adjust_type - - transformer = TopBottomTransformer(window=window) - - super().__init__(entity_schema, provider, entity_provider, entity_ids, exchanges, codes, the_timestamp, - 
start_timestamp, end_timestamp, columns, filters, order, limit, level, category_field, - time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer, - accumulator, need_persist, dry_run, factor_name, clear_state, not_load_data, adjust_type) - - -if __name__ == '__main__': - factor = TopBottomFactor(codes=['601318'], start_timestamp='2005-01-01', - end_timestamp=now_pd_timestamp(), - level=IntervalLevel.LEVEL_1DAY, window=120) - print(factor.factor_df) - - data_reader1 = DataReader(codes=['601318'], data_schema=Stock1dKdata, entity_schema=Stock) - - drawer = Drawer(main_df=data_reader1.data_df, factor_df_list=[factor.factor_df[['top', 'bottom']]]) - drawer.draw_kline(show=True) -# the __all__ is generated -__all__ = ['TopBottomTransformer', 'TopBottomFactor'] diff --git a/zvt/factors/macd/macd_factor.py b/zvt/factors/macd/macd_factor.py deleted file mode 100644 index 7a7e8612..00000000 --- a/zvt/factors/macd/macd_factor.py +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -from typing import List, Optional - -import numpy as np -import pandas as pd - -from zvt.factors.algorithm import MacdTransformer -from zvt.factors.technical_factor import TechnicalFactor - - -class MacdFactor(TechnicalFactor): - transformer = MacdTransformer(count_live_dead=True) - - def drawer_factor_df_list(self) -> Optional[List[pd.DataFrame]]: - return None - - def drawer_sub_df_list(self) -> Optional[List[pd.DataFrame]]: - return [self.factor_df[['diff', 'dea', 'macd']]] - - def drawer_sub_col_chart(self) -> Optional[dict]: - return {'diff': 'line', - 'dea': 'line', - 'macd': 'bar'} - - -class BullFactor(MacdFactor): - def compute_result(self): - super().compute_result() - self.result_df = self.factor_df['bull'].to_frame(name='score') - - -class KeepBullFactor(BullFactor): - keep_window = 20 - - def compute_result(self): - super().compute_result() - df = self.result_df['score'].groupby(level=0).rolling(window=self.keep_window, - 
min_periods=self.keep_window).apply( - lambda x: np.logical_and.reduce(x)) - df = df.reset_index(level=0, drop=True) - self.result_df['score'] = df - - -# 金叉 死叉 持续时间 切换点 -class LiveOrDeadFactor(MacdFactor): - pattern = [-5, 1] - - def compute_result(self): - super().compute_result() - self.factor_df['pre'] = self.factor_df['live_count'].shift() - s = (self.factor_df['pre'] <= self.pattern[0]) & (self.factor_df['live_count'] >= self.pattern[1]) - self.result_df = s.to_frame(name='score') - - -class GoldCrossFactor(MacdFactor): - def compute_result(self): - super().compute_result() - s = self.factor_df['live'] == 1 - self.result_df = s.to_frame(name='score') - - -# the __all__ is generated -__all__ = ['MacdFactor', 'BullFactor', 'KeepBullFactor', 'LiveOrDeadFactor', 'GoldCrossFactor'] - -if __name__ == '__main__': - from zvt.contract import IntervalLevel - - f1 = LiveOrDeadFactor(level=IntervalLevel.LEVEL_1WEEK, start_timestamp='2018-01-01') - - df = f1.factor_df - print(df[(df['score'] > 0) & (df['timestamp'] == '2021-01-04')]) diff --git a/zvt/factors/target_selector.py b/zvt/factors/target_selector.py deleted file mode 100644 index cf1d7bcc..00000000 --- a/zvt/factors/target_selector.py +++ /dev/null @@ -1,214 +0,0 @@ -import operator -from enum import Enum -from itertools import accumulate -from typing import List - -import pandas as pd -from pandas import DataFrame - -from zvt.contract import IntervalLevel -from zvt.contract.drawer import Drawer -from zvt.contract.factor import FilterFactor, ScoreFactor, Factor -from zvt.domain.meta.stock_meta import Stock -from zvt.utils.pd_utils import index_df, pd_is_not_null -from zvt.utils.time_utils import to_pd_timestamp, now_pd_timestamp - - -class TargetType(Enum): - # open_long 代表开多,并应该平掉相应标的的空单 - open_long = 'open_long' - # open_short 代表开空,并应该平掉相应标的的多单 - open_short = 'open_short' - # 其他情况就是保持当前的持仓 - - -class TargetSelector(object): - def __init__(self, - entity_ids=None, - entity_schema=Stock, - exchanges=None, - 
codes=None, - the_timestamp=None, - start_timestamp=None, - end_timestamp=None, - long_threshold=0.8, - short_threshold=0.2, - level=IntervalLevel.LEVEL_1DAY, - provider=None) -> None: - self.entity_ids = entity_ids - self.entity_schema = entity_schema - self.exchanges = exchanges - self.codes = codes - self.provider = provider - - if the_timestamp: - self.the_timestamp = to_pd_timestamp(the_timestamp) - self.start_timestamp = self.the_timestamp - self.end_timestamp = self.the_timestamp - else: - if start_timestamp: - self.start_timestamp = to_pd_timestamp(start_timestamp) - if end_timestamp: - self.end_timestamp = to_pd_timestamp(end_timestamp) - else: - self.end_timestamp = now_pd_timestamp() - - self.long_threshold = long_threshold - self.short_threshold = short_threshold - self.level = level - - self.filter_factors: List[FilterFactor] = [] - self.score_factors: List[ScoreFactor] = [] - self.filter_result = None - self.score_result = None - - self.open_long_df: DataFrame = None - self.open_short_df: DataFrame = None - - self.init_factors(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, codes=codes, - the_timestamp=the_timestamp, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - level=self.level) - - def init_factors(self, entity_ids, entity_schema, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, - level): - pass - - def add_filter_factor(self, factor: FilterFactor): - self.check_factor(factor) - self.filter_factors.append(factor) - return self - - def add_score_factor(self, factor: ScoreFactor): - self.check_factor(factor) - self.score_factors.append(factor) - return self - - def check_factor(self, factor: Factor): - assert factor.level == self.level - - def move_on(self, to_timestamp=None, kdata_use_begin_time=False, timeout=20): - if self.score_factors: - for factor in self.score_factors: - factor.move_on(to_timestamp, timeout=timeout) - if self.filter_factors: - for factor in self.filter_factors: - 
factor.move_on(to_timestamp, timeout=timeout) - - self.run() - - def run(self): - """ - - """ - if self.filter_factors: - musts = [] - for factor in self.filter_factors: - df = factor.result_df - - if not pd_is_not_null(df): - raise Exception('no data for factor:{},{}'.format(factor.factor_name, factor)) - - if len(df.columns) > 1: - s = df.agg("and", axis="columns") - s.name = 'score' - musts.append(s.to_frame(name='score')) - else: - df.columns = ['score'] - musts.append(df) - - self.filter_result = list(accumulate(musts, func=operator.__and__))[-1] - - if self.score_factors: - scores = [] - for factor in self.score_factors: - df = factor.result_df - if not pd_is_not_null(df): - raise Exception('no data for factor:{},{}'.format(factor.factor_name, factor)) - - if len(df.columns) > 1: - s = df.agg("mean", axis="columns") - s.name = 'score' - scores.append(s.to_frame(name='score')) - else: - df.columns = ['score'] - scores.append(df) - self.score_result = list(accumulate(scores, func=operator.__add__))[-1] - - self.generate_targets() - - def get_targets(self, timestamp, target_type: TargetType = TargetType.open_long) -> pd.DataFrame: - if target_type == TargetType.open_long: - df = self.open_long_df - if target_type == TargetType.open_short: - df = self.open_short_df - - if pd_is_not_null(df): - if timestamp in df.index: - target_df = df.loc[[to_pd_timestamp(timestamp)], :] - return target_df['entity_id'].tolist() - return [] - - def get_open_long_targets(self, timestamp): - return self.get_targets(timestamp=timestamp, target_type=TargetType.open_long) - - def get_open_short_targets(self, timestamp): - return self.get_targets(timestamp=timestamp, target_type=TargetType.open_short) - - # overwrite it to generate targets - def generate_targets(self): - if pd_is_not_null(self.filter_result) and pd_is_not_null(self.score_result): - # for long - result1 = self.filter_result[self.filter_result.score] - result2 = self.score_result[self.score_result.score >= 
self.long_threshold] - long_result = result2.loc[result1.index, :] - # for short - result1 = self.filter_result[~self.filter_result.score] - result2 = self.score_result[self.score_result.score <= self.short_threshold] - short_result = result2.loc[result1.index, :] - elif pd_is_not_null(self.score_result): - long_result = self.score_result[self.score_result.score >= self.long_threshold] - short_result = self.score_result[self.score_result.score <= self.short_threshold] - else: - long_result = self.filter_result[self.filter_result.score == True] - short_result = self.filter_result[self.filter_result.score == False] - - self.open_long_df = self.normalize_result_df(long_result) - self.open_short_df = self.normalize_result_df(short_result) - - def get_result_df(self): - return self.open_long_df - - def normalize_result_df(self, df): - if pd_is_not_null(df): - df = df.reset_index() - df = index_df(df) - df = df.sort_values(by=['score', 'entity_id']) - return df - - def draw(self, - render='html', - file_name=None, - width=None, - height=None, - title=None, - keep_ui_state=True, - annotation_df=None, - target_type: TargetType = TargetType.open_long): - - if target_type == TargetType.open_long: - df = self.open_long_df.copy() - elif target_type == TargetType.open_short: - df = self.open_short_df.copy() - - df['target_type'] = target_type.value - - if pd_is_not_null(df): - df = df.reset_index(drop=False) - drawer = Drawer(df) - - drawer.draw_table(width=width, height=height, title=title, - keep_ui_state=keep_ui_state) - - -# the __all__ is generated -__all__ = ['TargetType', 'TargetSelector'] \ No newline at end of file diff --git a/zvt/factors/technical_factor.py b/zvt/factors/technical_factor.py deleted file mode 100644 index 6e52a5a3..00000000 --- a/zvt/factors/technical_factor.py +++ /dev/null @@ -1,65 +0,0 @@ -from typing import List, Union, Type - -import pandas as pd - -from zvt.api.quote import get_kdata_schema -from zvt.contract import IntervalLevel, EntityMixin, 
AdjustType -from zvt.contract.factor import Factor, Transformer, Accumulator, FactorMeta -from zvt.domain import Stock - - -class TechnicalFactor(Factor, metaclass=FactorMeta): - def __init__(self, - entity_schema: Type[EntityMixin] = Stock, - provider: str = None, - entity_provider: str = None, - entity_ids: List[str] = None, - exchanges: List[str] = None, - codes: List[str] = None, - the_timestamp: Union[str, pd.Timestamp] = None, - start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = None, - columns: List = None, - filters: List = None, - order: object = None, - limit: int = None, - level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, - category_field: str = 'entity_id', - time_field: str = 'timestamp', - computing_window: int = None, - keep_all_timestamp: bool = False, - fill_method: str = 'ffill', - effective_number: int = None, - transformer: Transformer = None, - accumulator: Accumulator = None, - need_persist: bool = False, - dry_run: bool = False, - factor_name: str = None, - clear_state: bool = False, - not_load_data: bool = False, - adjust_type: Union[AdjustType, str] = None) -> None: - if columns is None: - columns = ['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low'] - - # 股票默认使用后复权 - if entity_schema == Stock and not adjust_type: - adjust_type = AdjustType.hfq - - self.adjust_type = adjust_type - self.data_schema = get_kdata_schema(entity_schema.__name__, level=level, adjust_type=adjust_type) - - if not factor_name: - if type(level) == str: - factor_name = f'{type(self).__name__.lower()}_{level}' - else: - factor_name = f'{type(self).__name__.lower()}_{level.value}' - - super().__init__(self.data_schema, entity_schema, provider, entity_provider, entity_ids, exchanges, codes, - the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, level, - category_field, time_field, computing_window, keep_all_timestamp, fill_method, - effective_number, transformer, 
accumulator, need_persist, dry_run, factor_name, clear_state, - not_load_data) - - -# the __all__ is generated -__all__ = ['TechnicalFactor'] diff --git a/zvt/factors/zen/domain/common.py b/zvt/factors/zen/domain/common.py deleted file mode 100644 index 61d84ebf..00000000 --- a/zvt/factors/zen/domain/common.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -from sqlalchemy import Column, Float, String, Boolean - -from zvt.contract import Mixin - - -class ZenFactorCommon(Mixin): - level = Column(String(length=32)) - # 开盘价 - open = Column(Float) - # 收盘价 - close = Column(Float) - # 最高价 - high = Column(Float) - # 最低价 - low = Column(Float) - # 成交量 - volume = Column(Float) - # 成交金额 - turnover = Column(Float) - - # 笔的底 - bi_di = Column(Boolean) - # 笔的顶 - bi_ding = Column(Boolean) - # 记录笔顶/底分型的值,bi_di取low,bi_ding取high,其他为None,绘图时取有值的连线即为 笔 - bi_value = Column(Float) - - # 记录临时分型,不变 - tmp_ding = Column(Boolean) - tmp_di = Column(Boolean) - - duan_state = Column(String(length=32)) - - # 段的底 - duan_di = Column(Boolean) - # 段的顶 - duan_ding = Column(Boolean) - # 记录段顶/底的值,为duan_di时取low,为duan_ding时取high,其他为None,绘图时取有值的连线即为 段 - duan_value = Column(Float) - - # 记录在确定中枢的最后一个段的终点x1,值为Rect(x0,y0,x1,y1) - zhongshu = Column(String(length=512)) -# the __all__ is generated -__all__ = ['ZenFactorCommon'] \ No newline at end of file diff --git a/zvt/factors/zen/zen_factor.py b/zvt/factors/zen/zen_factor.py deleted file mode 100644 index 9797d3c1..00000000 --- a/zvt/factors/zen/zen_factor.py +++ /dev/null @@ -1,588 +0,0 @@ -# -*- coding: utf-8 -*- -import json -import logging -from enum import Enum -from typing import List -from typing import Union, Optional, Type - -import numpy as np -import pandas as pd - -from zvt.contract import EntityMixin -from zvt.contract import IntervalLevel, AdjustType -from zvt.contract.api import get_schema_by_name -from zvt.contract.data_type import Bean -from zvt.contract.drawer import Rect -from zvt.contract.factor import Accumulator -from 
zvt.contract.factor import Transformer -from zvt.domain import Stock -from zvt.factors.algorithm import intersect -from zvt.factors.technical_factor import TechnicalFactor -from zvt.utils import pd_is_not_null, to_string -from zvt.utils import to_time_str -from zvt.utils.time_utils import TIME_FORMAT_ISO8601 - -logger = logging.getLogger(__name__) - - -class Direction(Enum): - up = 'up' - down = 'down' - - def opposite(self): - if self == Direction.up: - return Direction.down - if self == Direction.down: - return Direction.up - - -class Fenxing(Bean): - def __init__(self, state, kdata, index) -> None: - self.state = state - self.kdata = kdata - self.index = index - - -class KState(Enum): - # 顶分型 - bi_ding = 'bi_ding' - # 底分型 - bi_di = 'bi_di' - # 临时 - tmp_ding = 'tmp_ding' - tmp_di = 'tmp_di' - # 候选(candidate) - can_ding = 'can_ding' - can_di = 'can_di' - - -class DuanState(Enum): - up = 'up' - down = 'down' - # Bardo,中阴阶段,不定,变化,易 - yi = 'yi' - - -def fenxing_power(left, middle, right, fenxing='tmp_ding'): - if fenxing == 'tmp_ding': - a = middle['high'] - middle['close'] - b = middle['high'] - left['high'] - c = middle['high'] - right['high'] - return -(a + b + c) / middle['close'] - if fenxing == 'tmp_di': - a = abs(middle['low'] - middle['close']) - b = abs(middle['low'] - left['low']) - c = abs(middle['low'] - right['low']) - return (a + b + c) / middle['close'] - - -def a_include_b(a: pd.Series, b: pd.Series) -> bool: - """ - kdata a includes kdata b - - :param a: - :param b: - :return: - """ - return (a['high'] >= b['high']) and (a['low'] <= b['low']) - - -def is_including(kdata1, kdata2): - return a_include_b(kdata1, kdata2) or a_include_b(kdata2, kdata1) - - -def get_direction(kdata, pre_kdata, current=Direction.up) -> Direction: - if is_up(kdata, pre_kdata): - return Direction.up - if is_down(kdata, pre_kdata): - return Direction.down - - return current - - -def is_up(kdata, pre_kdata): - return kdata['high'] > pre_kdata['high'] - - -def is_down(kdata, 
pre_kdata): - return kdata['low'] < pre_kdata['low'] - - -def handle_first_fenxing(one_df, step=11): - if step >= len(one_df): - logger.info(f"coult not get fenxing by step {step}, len {len(one_df)}") - return None, None, None - - logger.info(f"try to get first fenxing by step {step}") - - df = one_df.iloc[:step] - ding_kdata = df[df['high'].max() == df['high']] - ding_index = int(ding_kdata.index[-1]) - - di_kdata = df[df['low'].min() == df['low']] - di_index = int(di_kdata.index[-1]) - - # 确定第一个分型 - if abs(ding_index - di_index) >= 4: - if ding_index > di_index: - fenxing = 'bi_di' - fenxing_index = di_index - one_df.loc[di_index, 'bi_di'] = True - # 确定第一个分型后,开始遍历的位置 - start_index = ding_index - # 目前的笔的方向,up代表寻找 can_ding;down代表寻找can_di - direction = Direction.up - else: - fenxing = 'bi_ding' - fenxing_index = ding_index - one_df.loc[ding_index, 'bi_ding'] = True - start_index = di_index - direction = Direction.down - return Fenxing(state=fenxing, index=fenxing_index, - kdata={ - 'low': float(one_df.loc[fenxing_index]['low']), - 'high': float(one_df.loc[fenxing_index]['high']) - }), start_index, direction - else: - logger.info("need add step") - return handle_first_fenxing(one_df, step=step + 1) - - -def handle_duan(fenxing_list: List[Fenxing], pre_duan_state='yi'): - state = fenxing_list[0].state - # 1笔区间 - bi1_start = fenxing_list[0].kdata - bi1_end = fenxing_list[1].kdata - # 3笔区间 - bi3_start = fenxing_list[2].kdata - bi3_end = fenxing_list[3].kdata - - if state == 'bi_ding': - # 向下段,下-上-下 - - # 第一笔区间 - range1 = (bi1_end['low'], bi1_start['high']) - # 第三笔区间 - range3 = (bi3_end['low'], bi3_start['high']) - - # 1,3有重叠,认为第一个段出现 - if intersect(range1, range3): - return 'down' - - else: - # 向上段,上-下-上 - - # 第一笔区间 - range1 = (bi1_start['low'], bi1_end['high']) - # 第三笔区间 - range3 = (bi3_start['low'], bi3_end['high']) - - # 1,3有重叠,认为第一个段出现 - if intersect(range1, range3): - return 'up' - - return pre_duan_state - - -def handle_including(one_df, index, kdata, pre_index, 
pre_kdata, tmp_direction: Direction): - # 改kdata - if a_include_b(kdata, pre_kdata): - # 长的kdata变短 - if tmp_direction == Direction.up: - one_df.loc[index, 'low'] = pre_kdata['low'] - else: - one_df.loc[index, 'high'] = pre_kdata['high'] - # 改pre_kdata - elif a_include_b(pre_kdata, kdata): - # 长的pre_kdata变短 - if tmp_direction == Direction.down: - one_df.loc[pre_index, 'low'] = kdata['low'] - else: - one_df.loc[pre_index, 'high'] = kdata['high'] - - -class FactorStateEncoder(json.JSONEncoder): - def default(self, object): - if isinstance(object, pd.Series): - return object.to_dict() - elif isinstance(object, pd.Timestamp): - return to_time_str(object, fmt=TIME_FORMAT_ISO8601) - elif isinstance(object, Enum): - return object.value - elif isinstance(object, Bean): - return object.dict() - else: - return super().default(object) - - -def decode_rect(dct): - return Rect(x0=dct['x0'], y0=dct['y0'], x1=dct['x1'], y1=dct['y1']) - - -def decode_fenxing(dct): - return Fenxing(state=dct['state'], kdata=dct['kdata'], index=dct['index']) - - -def get_zen_factor_schema(entity_type: str, - level: Union[IntervalLevel, str] = IntervalLevel.LEVEL_1DAY): - if type(level) == str: - level = IntervalLevel(level) - - # zen factor schema rule - # 1)name:{SecurityType.value.capitalize()}{IntervalLevel.value.upper()}ZenFactor - schema_str = '{}{}ZenFactor'.format(entity_type.capitalize(), level.value.capitalize()) - - return get_schema_by_name(schema_str) - - -@to_string -class ZenState(Bean): - def __init__(self, state: dict = None) -> None: - super().__init__() - - if not state: - state = dict() - - # 用于计算未完成段的分型 - self.fenxing_list = state.get('fenxing_list', []) - fenxing_list = [Fenxing(item['state'], item['kdata'], item['index']) for item in self.fenxing_list] - self.fenxing_list = fenxing_list - - # 目前的方向 - if state.get('direction'): - self.direction = Direction(state.get('direction')) - else: - self.direction = None - - # 候选分型(candidate) - self.can_fenxing = state.get('can_fenxing') - 
self.can_fenxing_index = state.get('can_fenxing_index') - # 反方向count - self.opposite_count = state.get('opposite_count', 0) - # 目前段的方向 - self.current_duan_state = state.get('current_duan_state', 'yi') - - # list of (timestamp,value) - self.duans = state.get('duans', []) - - -def handle_zhongshu(points: list, acc_df, end_index, zhongshu_col='zhongshu', zhongshu_change_col='zhongshu_change'): - zhongshu = None - zhongshu_change = None - - if len(points) == 4: - x1 = points[0][0] - x2 = points[3][0] - - if points[0][1] < points[1][1]: - # 向下段 - range = intersect((points[0][1], points[1][1]), - (points[2][1], points[3][1])) - if range: - y1, y2 = range - # 记录中枢 - zhongshu = Rect(x0=x1, x1=x2, y0=y1, y1=y2) - zhongshu_change = abs(y1 - y2) / y1 - acc_df.loc[end_index, zhongshu_col] = zhongshu - acc_df.loc[end_index, zhongshu_change_col] = zhongshu_change - points = points[-1:] - else: - points = points[1:] - else: - # 向上段 - range = intersect((points[1][1], points[0][1]), - (points[3][1], points[2][1])) - if range: - y1, y2 = range - # 记录中枢 - zhongshu = Rect(x0=x1, x1=x2, y0=y1, y1=y2) - zhongshu_change = abs(y1 - y2) / y1 - - acc_df.loc[end_index, zhongshu_col] = zhongshu - acc_df.loc[end_index, zhongshu_change_col] = zhongshu_change - points = points[-1:] - else: - points = points[1:] - return points, zhongshu, zhongshu_change - - -class ZenAccumulator(Accumulator): - def __init__(self, acc_window: int = 1) -> None: - """ - 算法和概念 - <实体> 某种状态的k线 - [实体] 连续实体排列 - - 两k线的关系有三种: 上涨,下跌,包含 - 上涨: k线高点比之前高,低点比之前高 - 下跌: k线低点比之前低,高点比之前低 - 包含: k线高点比之前高,低点比之前低;反方向,即被包含 - 处理包含关系,长的k线缩短,上涨时,低点取max(low1,low2);下跌时,高点取min(high1,high2) - - 第一个顶(底)分型: 出现连续4根下跌(上涨)k线 - 之后开始寻找 候选底(顶)分型,寻找的过程中有以下状态 - - <临时顶>: 中间k线比两边的高点高,是一条特定的k线 - <临时底>: 中间k线比两边的高点高,是一条特定的k线 - - <候选顶分型>: 连续的<临时顶>取最大 - <候选底分型>: 连续的<临时底>取最小 - 任何时刻只能有一个候选,其之前是一个确定的分型 - - <上升k线>: - <下降k线>: - <连接k线>: 分型之间的k线都可以认为是连接k线,以上为演化过程的中间态 - distance(<候选顶分型>, <连接k线>)>=4 则 <候选顶分型> 变成顶分型 - distance(<候选底分型>, <连接k线>)>=4 则 <候选底分型> 变成底分型 - - 
<顶分型><连接k线><候选底分型> - <底分型><连接k线><候选顶分型> - """ - super().__init__(acc_window) - - def acc_one(self, entity_id, df: pd.DataFrame, acc_df: pd.DataFrame, state: dict) -> (pd.DataFrame, dict): - self.logger.info(f'acc_one:{entity_id}') - if pd_is_not_null(acc_df): - df = df[df.index > acc_df.index[-1]] - if pd_is_not_null(df): - self.logger.info(f'compute from {df.iloc[0]["timestamp"]}') - # 遍历的开始位置 - start_index = len(acc_df) - - acc_df = pd.concat([acc_df, df]) - - zen_state = ZenState(state) - - acc_df = acc_df.reset_index(drop=True) - else: - self.logger.info('no need to compute') - return acc_df, state - else: - acc_df = df - # 笔的底 - acc_df['bi_di'] = False - # 笔的顶 - acc_df['bi_ding'] = False - # 记录笔顶/底分型的值,bi_di取low,bi_ding取high,其他为None,绘图时取有值的连线即为 笔 - acc_df['bi_value'] = np.NAN - - # 记录临时分型,不变 - acc_df['tmp_ding'] = False - acc_df['tmp_di'] = False - # 分型的力度 - acc_df['fenxing_power'] = np.NAN - - acc_df['duan_state'] = 'yi' - - # 段的底 - acc_df['duan_di'] = False - # 段的顶 - acc_df['duan_ding'] = False - # 记录段顶/底的值,为duan_di时取low,为duan_ding时取high,其他为None,绘图时取有值的连线即为 段 - acc_df['duan_value'] = np.NAN - - # 记录在确定中枢的最后一个段的终点x1,值为Rect(x0,y0,x1,y1) - acc_df['zhongshu'] = np.NAN - - acc_df = acc_df.reset_index(drop=True) - - zen_state = ZenState(dict(fenxing_list=[], direction=None, can_fenxing=None, can_fenxing_index=None, - opposite_count=0, current_duan_state='yi', duans=[], pre_bi=None, pre_duan=None)) - - zen_state.fenxing_list: List[Fenxing] = [] - - # 取前11条k线,至多出现一个顶分型+底分型 - # 注:只是一种方便的确定第一个分型的办法,有了第一个分型,后面的处理就比较统一 - # start_index 为遍历开始的位置 - # direction为一个确定分型后的方向,即顶分型后为:down,底分型后为:up - fenxing, start_index, direction = handle_first_fenxing(acc_df, step=11) - if not fenxing: - return None, None - - zen_state.fenxing_list.append(fenxing) - zen_state.direction = direction - - # list of (timestamp,value) - zen_state.duans = [] - - pre_kdata = acc_df.iloc[start_index - 1] - pre_index = start_index - 1 - - tmp_direction = zen_state.direction - - for index, kdata in 
acc_df.iloc[start_index:].iterrows(): - # print(f'timestamp: {kdata.timestamp}') - # 临时方向 - tmp_direction = get_direction(kdata, pre_kdata, current=tmp_direction) - - # 处理包含关系 - handle_including(one_df=acc_df, index=index, kdata=kdata, pre_index=pre_index, pre_kdata=pre_kdata, - tmp_direction=tmp_direction) - - # 根据方向,寻找对应的分型 和 段 - if zen_state.direction == Direction.up: - tmp_fenxing_col = 'tmp_ding' - fenxing_col = 'bi_ding' - else: - tmp_fenxing_col = 'tmp_di' - fenxing_col = 'bi_di' - - # 方向一致,延续中 - if tmp_direction == zen_state.direction: - zen_state.opposite_count = 0 - # 反向,寻找反 分型 - else: - zen_state.opposite_count = zen_state.opposite_count + 1 - # 第一次反向 - if zen_state.opposite_count == 1: - acc_df.loc[pre_index, tmp_fenxing_col] = True - acc_df.loc[pre_index, 'fenxing_power'] = fenxing_power(acc_df.loc[pre_index - 1], pre_kdata, kdata, - fenxing=tmp_fenxing_col) - - if zen_state.can_fenxing is not None: - # 候选底分型 - if tmp_direction == Direction.up: - # 取小的 - if pre_kdata['low'] <= zen_state.can_fenxing['low']: - zen_state.can_fenxing = pre_kdata[['low', 'high']] - zen_state.can_fenxing_index = pre_index - - # 候选顶分型 - else: - # 取大的 - if pre_kdata['high'] >= zen_state.can_fenxing['high']: - zen_state.can_fenxing = pre_kdata[['low', 'high']] - zen_state.can_fenxing_index = pre_index - else: - zen_state.can_fenxing = pre_kdata[['low', 'high']] - zen_state.can_fenxing_index = pre_index - - # 分型确立 - if zen_state.can_fenxing is not None: - if zen_state.opposite_count >= 4 or (index - zen_state.can_fenxing_index >= 8): - acc_df.loc[zen_state.can_fenxing_index, fenxing_col] = True - - # 记录笔的值 - if fenxing_col == 'bi_ding': - bi_value = acc_df.loc[zen_state.can_fenxing_index, 'high'] - else: - bi_value = acc_df.loc[zen_state.can_fenxing_index, 'low'] - acc_df.loc[zen_state.can_fenxing_index, 'bi_value'] = bi_value - - zen_state.pre_bi = (zen_state.can_fenxing_index, bi_value) - - zen_state.opposite_count = 0 - zen_state.direction = zen_state.direction.opposite() - 
zen_state.can_fenxing = None - - # 确定第一个段 - if zen_state.fenxing_list != None: - zen_state.fenxing_list.append( - Fenxing(state=fenxing_col, - kdata={ - 'low': float(acc_df.loc[zen_state.can_fenxing_index]['low']), - 'high': float(acc_df.loc[zen_state.can_fenxing_index]['high']) - }, - index=zen_state.can_fenxing_index)) - - if len(zen_state.fenxing_list) == 4: - duan_state = handle_duan(fenxing_list=zen_state.fenxing_list, - pre_duan_state=zen_state.current_duan_state) - - change = duan_state != zen_state.current_duan_state - - if change: - zen_state.current_duan_state = duan_state - - # 确定状态 - acc_df.loc[zen_state.fenxing_list[0].index:zen_state.fenxing_list[-1].index, - 'duan_state'] = zen_state.current_duan_state - - duan_index = zen_state.fenxing_list[0].index - if zen_state.current_duan_state == 'up': - acc_df.loc[duan_index, 'duan_di'] = True - duan_value = acc_df.loc[duan_index, 'low'] - else: - duan_index = zen_state.fenxing_list[0].index - acc_df.loc[duan_index, 'duan_ding'] = True - duan_value = acc_df.loc[duan_index, 'high'] - # 记录段的值 - acc_df.loc[duan_index, 'duan_value'] = duan_value - - # 记录用于计算中枢的段 - zen_state.duans.append((acc_df.loc[duan_index, 'timestamp'], duan_value)) - - # 计算中枢 - zen_state.duans, _, _ = handle_zhongshu(points=zen_state.duans, acc_df=acc_df, - end_index=duan_index, - zhongshu_col='zhongshu', - zhongshu_change_col='zhongshu_change') - - # 只留最后一个 - zen_state.fenxing_list = zen_state.fenxing_list[-1:] - else: - # 保持之前的状态并踢出候选 - acc_df.loc[ - zen_state.fenxing_list[0].index, 'duan_state'] = zen_state.current_duan_state - zen_state.fenxing_list = zen_state.fenxing_list[1:] - - pre_kdata = kdata - pre_index = index - - acc_df = acc_df.set_index('timestamp', drop=False) - return acc_df, zen_state - - -class ZenFactor(TechnicalFactor): - - def __init__(self, entity_schema: Type[EntityMixin] = Stock, provider: str = None, entity_provider: str = None, - entity_ids: List[str] = None, exchanges: List[str] = None, codes: List[str] = None, - 
the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = None, columns: List = None, filters: List = None, - order: object = None, limit: int = None, level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, - category_field: str = 'entity_id', time_field: str = 'timestamp', computing_window: int = None, - keep_all_timestamp: bool = False, fill_method: str = 'ffill', effective_number: int = None, - transformer: Transformer = None, accumulator: Accumulator = ZenAccumulator(), - need_persist: bool = False, dry_run: bool = False, factor_name: str = None, clear_state: bool = False, - not_load_data: bool = False, - adjust_type: Union[AdjustType, str] = None) -> None: - self.factor_schema = get_zen_factor_schema(entity_type=entity_schema.__name__, level=level) - super().__init__(entity_schema, provider, entity_provider, entity_ids, exchanges, codes, the_timestamp, - start_timestamp, end_timestamp, columns, filters, order, limit, level, category_field, - time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer, - accumulator, need_persist, dry_run, factor_name, clear_state, not_load_data, adjust_type) - - def factor_col_map_object_hook(self) -> dict: - return { - 'zhongshu': decode_rect - } - - def factor_encoder(self): - return FactorStateEncoder - - def drawer_factor_df_list(self) -> Optional[List[pd.DataFrame]]: - bi_value = self.factor_df[['bi_value']].dropna() - duan_value = self.factor_df[['duan_value']].dropna() - return [bi_value, duan_value] - - def drawer_rects(self) -> List[Rect]: - df = self.factor_df[['zhongshu']].dropna() - return df['zhongshu'].tolist() - - -if __name__ == '__main__': - zen = ZenFactor(entity_ids=['stock_sz_000338', 'stock_sz_000001'], level='1d', need_persist=True, clear_state=False) - - print(zen.factor_df) - - zen.move_on(timeout=1) - - print(zen.factor_df) - - zen.draw(show=True) - -# the __all__ is generated 
-__all__ = ['Direction', 'Fenxing', 'KState', 'DuanState', 'fenxing_power', 'a_include_b', 'is_including', - 'get_direction', 'is_up', 'is_down', 'handle_first_fenxing', 'handle_duan', 'handle_including', - 'FactorStateEncoder', 'decode_rect', 'decode_fenxing', 'get_zen_factor_schema', 'ZenState', - 'ZenAccumulator', 'ZenFactor'] diff --git a/zvt/fill_project.py b/zvt/fill_project.py deleted file mode 100644 index a923d744..00000000 --- a/zvt/fill_project.py +++ /dev/null @@ -1,35 +0,0 @@ -# script to auto generate some files -from zvt.autocode.generator import gen_kdata_schema, gen_exports -from zvt.contract import AdjustType -from zvt.contract import IntervalLevel - - -def gen_kdata_schemas(): - # 股票行情 - gen_kdata_schema(pkg='zvt', providers=['joinquant'], entity_type='stock', - levels=[level for level in IntervalLevel if level != IntervalLevel.LEVEL_TICK], - adjust_types=[None, AdjustType.hfq], entity_in_submodule=True) - - # 板块行情 - gen_kdata_schema(pkg='zvt', providers=['eastmoney'], entity_type='block', - levels=[IntervalLevel.LEVEL_1DAY, IntervalLevel.LEVEL_1WEEK, IntervalLevel.LEVEL_1MON], - entity_in_submodule=True) - - # etf行情 - gen_kdata_schema(pkg='zvt', providers=['sina'], entity_type='etf', - levels=[IntervalLevel.LEVEL_1DAY], entity_in_submodule=True) - # 指数行情 - gen_kdata_schema(pkg='zvt', providers=['joinquant', 'sina'], entity_type='index', - levels=[IntervalLevel.LEVEL_1DAY, IntervalLevel.LEVEL_1WEEK], entity_in_submodule=True) - - -if __name__ == '__main__': - # zip_dir(ZVT_TEST_DATA_PATH, zip_file_name=DATA_SAMPLE_ZIP_PATH) - # gen_exports('api') - # gen_exports('domain') - # gen_exports('informer') - # gen_exports('utils') - # gen_exports('trader') - # gen_exports('autocode') - gen_exports('factors') - # gen_kdata_schemas() diff --git a/zvt/recorders/__init__.py b/zvt/recorders/__init__.py deleted file mode 100644 index e399fab1..00000000 --- a/zvt/recorders/__init__.py +++ /dev/null @@ -1,137 +0,0 @@ -# -*- coding: utf-8 -*- -from 
zvt.contract.api import del_data -from zvt.recorders.eastmoney import * -from zvt.recorders.exchange import * -from zvt.recorders.joinquant import * -from zvt.recorders.sina import * - -CHINA_STOCK_MAIN_INDEX = [{'id': 'index_sh_000001', - 'entity_id': 'index_sh_000001', - 'code': '000001', - 'name': '上证指数', - 'timestamp': '1990-12-19', - 'exchange': 'sh', - 'entity_type': 'index', - 'category': 'main'}, - {'id': 'index_sh_000016', - 'entity_id': 'index_sh_000016', - 'code': '000016', - 'name': '上证50', - 'timestamp': '2004-01-02', - 'exchange': 'sh', - 'entity_type': 'index', - 'category': 'main'}, - {'id': 'index_sh_000905', - 'entity_id': 'index_sh_000905', - 'code': '000905', - 'name': '中证500', - 'timestamp': '2005-01-04', - 'exchange': 'sh', - 'entity_type': 'index', - 'category': 'main'}, - {'id': 'index_sz_399001', - 'entity_id': 'index_sz_399001', - 'code': '399001', - 'name': '深证成指', - 'timestamp': '1991-04-03', - 'exchange': 'sz', - 'entity_type': 'index', - 'category': 'main'}, - {'id': 'index_sz_399106', - 'entity_id': 'index_sz_399106', - 'code': '399106', - 'name': '深证综指', - 'timestamp': '1991-04-03', - 'exchange': 'sz', - 'entity_type': 'index', - 'category': 'main'}, - {'id': 'index_sz_399300', - 'entity_id': 'index_sz_399300', - 'code': '399300', - 'name': '沪深300', - 'timestamp': '2002-01-04', - 'exchange': 'sz', - 'entity_type': 'index', - 'category': 'main'}, - {'id': 'index_sz_399005', - 'entity_id': 'index_sz_399005', - 'code': '399005', - 'name': '中小板指', - 'timestamp': '2006-01-24', - 'exchange': 'sz', - 'entity_type': 'index', - 'category': 'main'}, - {'id': 'index_sz_399006', - 'entity_id': 'index_sz_399006', - 'code': '399006', - 'name': '创业板指', - 'timestamp': '2010-06-01', - 'exchange': 'sz', - 'entity_type': 'index', - 'category': 'main'}, - {'id': 'index_sh_000688', - 'entity_id': 'index_sh_000688', - 'code': '000688', - 'name': '科创50', - 'timestamp': '2019-01-01', - 'exchange': 'sh', - 'entity_type': 'index', - 'category': 'main'}, - # # 
聚宽编码 - # # 市场通编码 市场通名称 - # # 310001 沪股通 - # # 310002 深股通 - # # 310003 港股通(沪) - # # 310004 港股通(深) - {'id': 'index_sz_310001', - 'entity_id': 'index_sz_310001', - 'code': '310001', - 'name': '沪股通', - 'timestamp': '2014-11-17', - 'exchange': 'sz', - 'entity_type': 'index', - 'category': 'main'}, - {'id': 'index_sz_310002', - 'entity_id': 'index_sz_310002', - 'code': '310002', - 'name': '深股通', - 'timestamp': '2014-11-17', - 'exchange': 'sz', - 'entity_type': 'index', - 'category': 'main'}, - {'id': 'index_sz_310003', - 'entity_id': 'index_sz_310003', - 'code': '310003', - 'name': '港股通(沪)', - 'timestamp': '2014-11-17', - 'exchange': 'sz', - 'entity_type': 'index', - 'category': 'main'}, - {'id': 'index_sz_310004', - 'entity_id': 'index_sz_310004', - 'code': '310004', - 'name': '港股通(深)', - 'timestamp': '2014-11-17', - 'exchange': 'sz', - 'entity_type': 'index', - 'category': 'main'} - ] - - -def init_main_index(provider='exchange'): - from zvt.utils.time_utils import to_pd_timestamp - import pandas as pd - from zvt.contract.api import df_to_db - from zvt.domain import Index - - for item in CHINA_STOCK_MAIN_INDEX: - item['timestamp'] = to_pd_timestamp(item['timestamp']) - df = pd.DataFrame(CHINA_STOCK_MAIN_INDEX) - # print(df) - df_to_db(df=df, data_schema=Index, provider=provider, force_update=False) - # delete dirty data - # del_data(data_schema=Index, provider=provider, filters=[Index.exchange == 'cn']) - - -init_main_index(provider='joinquant') -init_main_index(provider='exchange') diff --git a/zvt/recorders/eastmoney/__init__.py b/zvt/recorders/eastmoney/__init__.py deleted file mode 100644 index ccdb6ed1..00000000 --- a/zvt/recorders/eastmoney/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -from zvt.recorders.eastmoney.dividend_financing import * -from zvt.recorders.eastmoney.finance import * -from zvt.recorders.eastmoney.holder import * -from zvt.recorders.eastmoney.meta import * -from zvt.recorders.eastmoney.quotes import * -from 
zvt.recorders.eastmoney.trading import * diff --git a/zvt/recorders/eastmoney/dividend_financing/__init__.py b/zvt/recorders/eastmoney/dividend_financing/__init__.py deleted file mode 100644 index 77bb7846..00000000 --- a/zvt/recorders/eastmoney/dividend_financing/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# -*- coding: utf-8 -*- -from zvt.recorders.eastmoney.dividend_financing.dividend_detail_recorder import * -from zvt.recorders.eastmoney.dividend_financing.dividend_financing_recorder import * -from zvt.recorders.eastmoney.dividend_financing.rights_issue_detail_recorder import * -from zvt.recorders.eastmoney.dividend_financing.spo_detail_recorder import * diff --git a/zvt/recorders/eastmoney/dividend_financing/rights_issue_detail_recorder.py b/zvt/recorders/eastmoney/dividend_financing/rights_issue_detail_recorder.py deleted file mode 100644 index 99285b75..00000000 --- a/zvt/recorders/eastmoney/dividend_financing/rights_issue_detail_recorder.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -from zvt.domain import RightsIssueDetail, DividendFinancing -from zvt.recorders.eastmoney.common import EastmoneyPageabeDataRecorder -from zvt.consts import SAMPLE_STOCK_CODES -from zvt.utils.pd_utils import pd_is_not_null -from zvt.utils.time_utils import now_pd_timestamp -from zvt.utils.utils import to_float - - -class RightsIssueDetailRecorder(EastmoneyPageabeDataRecorder): - data_schema = RightsIssueDetail - - url = 'https://emh5.eastmoney.com/api/FenHongRongZi/GetPeiGuMingXiList' - page_url = url - path_fields = ['PeiGuMingXiList'] - - def get_original_time_field(self): - return 'PeiGuGongGaoRi' - - def get_data_map(self): - return { - "rights_issues": ("ShiJiPeiGu", to_float), - "rights_issue_price": ("PeiGuJiaGe", to_float), - "rights_raising_fund": ("ShiJiMuJi", to_float) - } - - def on_finish(self): - last_year = str(now_pd_timestamp().year) - codes = [item.code for item in self.entities] - need_filleds = DividendFinancing.query_data(provider=self.provider, 
codes=codes, - return_type='domain', - session=self.session, - filters=[DividendFinancing.rights_raising_fund.is_(None)], - end_timestamp=last_year) - - for item in need_filleds: - df = RightsIssueDetail.query_data(provider=self.provider, entity_id=item.entity_id, - columns=[RightsIssueDetail.timestamp, - RightsIssueDetail.rights_raising_fund], - start_timestamp=item.timestamp, - end_timestamp="{}-12-31".format(item.timestamp.year)) - if pd_is_not_null(df): - item.rights_raising_fund = df['rights_raising_fund'].sum() - self.session.commit() - - super().on_finish() - - -__all__ = ['RightsIssueDetailRecorder'] - -if __name__ == '__main__': - # init_log('rights_issue.log') - - recorder = RightsIssueDetailRecorder(codes=SAMPLE_STOCK_CODES) - recorder.run() diff --git a/zvt/recorders/eastmoney/dividend_financing/spo_detail_recorder.py b/zvt/recorders/eastmoney/dividend_financing/spo_detail_recorder.py deleted file mode 100644 index 0422bb29..00000000 --- a/zvt/recorders/eastmoney/dividend_financing/spo_detail_recorder.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -from zvt.domain import SpoDetail, DividendFinancing -from zvt.recorders.eastmoney.common import EastmoneyPageabeDataRecorder -from zvt.utils.pd_utils import pd_is_not_null -from zvt.utils.time_utils import now_pd_timestamp -from zvt.utils.utils import to_float - - -class SPODetailRecorder(EastmoneyPageabeDataRecorder): - data_schema = SpoDetail - - url = 'https://emh5.eastmoney.com/api/FenHongRongZi/GetZengFaMingXiList' - page_url = url - path_fields = ['ZengFaMingXiList'] - - def get_original_time_field(self): - return 'ZengFaShiJian' - - def get_data_map(self): - return { - "spo_issues": ("ShiJiZengFa", to_float), - "spo_price": ("ZengFaJiaGe", to_float), - "spo_raising_fund": ("ShiJiMuJi", to_float) - } - - def on_finish(self): - last_year = str(now_pd_timestamp().year) - codes = [item.code for item in self.entities] - need_filleds = DividendFinancing.query_data(provider=self.provider, 
codes=codes, - return_type='domain', - session=self.session, - filters=[DividendFinancing.spo_raising_fund.is_(None)], - end_timestamp=last_year) - - for item in need_filleds: - df = SpoDetail.query_data(provider=self.provider, entity_id=item.entity_id, - columns=[SpoDetail.timestamp, SpoDetail.spo_raising_fund], - start_timestamp=item.timestamp, - end_timestamp="{}-12-31".format(item.timestamp.year)) - if pd_is_not_null(df): - item.spo_raising_fund = df['spo_raising_fund'].sum() - self.session.commit() - super().on_finish() - - -__all__ = ['SPODetailRecorder'] - -if __name__ == '__main__': - # init_log('spo_detail.log') - - recorder = SPODetailRecorder(codes=['000999']) - recorder.run() diff --git a/zvt/recorders/eastmoney/finance/__init__.py b/zvt/recorders/eastmoney/finance/__init__.py deleted file mode 100644 index 3e5b7115..00000000 --- a/zvt/recorders/eastmoney/finance/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# -*- coding: utf-8 -*- -from zvt.recorders.eastmoney.finance.china_stock_balance_sheet_recorder import * -from zvt.recorders.eastmoney.finance.china_stock_cash_flow_recorder import * -from zvt.recorders.eastmoney.finance.china_stock_finance_factor_recorder import * -from zvt.recorders.eastmoney.finance.china_stock_income_statement_recorder import * diff --git a/zvt/recorders/eastmoney/finance/base_china_stock_finance_recorder.py b/zvt/recorders/eastmoney/finance/base_china_stock_finance_recorder.py deleted file mode 100644 index a71a95f8..00000000 --- a/zvt/recorders/eastmoney/finance/base_china_stock_finance_recorder.py +++ /dev/null @@ -1,169 +0,0 @@ -# -*- coding: utf-8 -*- -import pandas as pd - -from jqdatapy.api import get_fundamentals -from zvt.api.quote import to_jq_report_period -from zvt.contract.api import get_data -from zvt.domain import FinanceFactor -from zvt.recorders.eastmoney.common import company_type_flag, get_fc, EastmoneyTimestampsDataRecorder, \ - call_eastmoney_api, get_from_path_fields -from zvt.recorders.joinquant.common 
import to_jq_entity_id -from zvt.utils.pd_utils import index_df -from zvt.utils.pd_utils import pd_is_not_null -from zvt.utils.time_utils import to_time_str, to_pd_timestamp - - -class BaseChinaStockFinanceRecorder(EastmoneyTimestampsDataRecorder): - finance_report_type = None - data_type = 1 - - timestamps_fetching_url = 'https://emh5.eastmoney.com/api/CaiWuFenXi/GetCompanyReportDateList' - timestamp_list_path_fields = ['CompanyReportDateList'] - timestamp_path_fields = ['ReportDate'] - - def __init__(self, entity_type='stock', exchanges=['sh', 'sz'], entity_ids=None, codes=None, day_data=False, batch_size=10, - force_update=False, sleeping_time=5, default_size=2000, real_time=False, - fix_duplicate_way='add', start_timestamp=None, end_timestamp=None, close_hour=0, - close_minute=0) -> None: - super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute) - - try: - self.fetch_jq_timestamp = True - except Exception as e: - self.fetch_jq_timestamp = False - self.logger.warning( - f'joinquant account not ok,the timestamp(publish date) for finance would be not correct', e) - - def init_timestamps(self, entity): - param = { - "color": "w", - "fc": get_fc(entity), - "DataType": self.data_type - } - - if self.finance_report_type == 'LiRunBiaoList' or self.finance_report_type == 'XianJinLiuLiangBiaoList': - param['ReportType'] = 1 - - timestamp_json_list = call_eastmoney_api(url=self.timestamps_fetching_url, - path_fields=self.timestamp_list_path_fields, - param=param) - - if self.timestamp_path_fields: - timestamps = [get_from_path_fields(data, self.timestamp_path_fields) for data in timestamp_json_list] - - return [to_pd_timestamp(t) for t in timestamps] - - def generate_request_param(self, security_item, start, end, size, timestamps): - if len(timestamps) <= 10: - param = { - "color": "w", - "fc": 
get_fc(security_item), - "corpType": company_type_flag(security_item), - # 0 means get all types - "reportDateType": 0, - "endDate": '', - "latestCount": size - } - else: - param = { - "color": "w", - "fc": get_fc(security_item), - "corpType": company_type_flag(security_item), - # 0 means get all types - "reportDateType": 0, - "endDate": to_time_str(timestamps[10]), - "latestCount": 10 - } - - if self.finance_report_type == 'LiRunBiaoList' or self.finance_report_type == 'XianJinLiuLiangBiaoList': - param['reportType'] = 1 - - return param - - def generate_path_fields(self, security_item): - comp_type = company_type_flag(security_item) - - if comp_type == "3": - return ['{}_YinHang'.format(self.finance_report_type)] - elif comp_type == "2": - return ['{}_BaoXian'.format(self.finance_report_type)] - elif comp_type == "1": - return ['{}_QuanShang'.format(self.finance_report_type)] - elif comp_type == "4": - return ['{}_QiYe'.format(self.finance_report_type)] - - def record(self, entity, start, end, size, timestamps): - # different with the default timestamps handling - param = self.generate_request_param(entity, start, end, size, timestamps) - self.logger.info('request param:{}'.format(param)) - - return self.api_wrapper.request(url=self.url, param=param, method=self.request_method, - path_fields=self.generate_path_fields(entity)) - - def get_original_time_field(self): - return 'ReportDate' - - def fill_timestamp_with_jq(self, security_item, the_data): - # get report published date from jq - try: - df = get_fundamentals(table='indicator', code=to_jq_entity_id(security_item), columns='pubDate', - date=to_jq_report_period(the_data.report_date), count=None, parse_dates=['pubDate']) - if pd_is_not_null(df): - the_data.timestamp = to_pd_timestamp(df['pubDate'][0]) - self.logger.info( - 'jq fill {} {} timestamp:{} for report_date:{}'.format(self.data_schema, security_item.id, - the_data.timestamp, - the_data.report_date)) - self.session.commit() - except Exception as e: - 
self.logger.error(e) - - def on_finish_entity(self, entity): - super().on_finish_entity(entity) - - if not self.fetch_jq_timestamp: - return - - # fill the timestamp for report published date - the_data_list = get_data(data_schema=self.data_schema, - provider=self.provider, - entity_id=entity.id, - order=self.data_schema.timestamp.asc(), - return_type='domain', - session=self.session, - filters=[self.data_schema.timestamp == self.data_schema.report_date, - self.data_schema.timestamp >= to_pd_timestamp('2005-01-01')]) - if the_data_list: - if self.data_schema == FinanceFactor: - for the_data in the_data_list: - self.fill_timestamp_with_jq(entity, the_data) - else: - df = FinanceFactor.query_data(entity_id=entity.id, - columns=[FinanceFactor.timestamp, FinanceFactor.report_date, - FinanceFactor.id], - filters=[FinanceFactor.timestamp != FinanceFactor.report_date, - FinanceFactor.timestamp >= to_pd_timestamp('2005-01-01'), - FinanceFactor.report_date >= the_data_list[0].report_date, - FinanceFactor.report_date <= the_data_list[-1].report_date, ]) - - if pd_is_not_null(df): - index_df(df, index='report_date', time_field='report_date') - - for the_data in the_data_list: - if (df is not None) and (not df.empty) and the_data.report_date in df.index: - the_data.timestamp = df.at[the_data.report_date, 'timestamp'] - self.logger.info( - 'db fill {} {} timestamp:{} for report_date:{}'.format(self.data_schema, entity.id, - the_data.timestamp, - the_data.report_date)) - self.session.commit() - else: - # self.logger.info( - # 'waiting jq fill {} {} timestamp:{} for report_date:{}'.format(self.data_schema, - # security_item.id, - # the_data.timestamp, - # the_data.report_date)) - - self.fill_timestamp_with_jq(entity, the_data) diff --git a/zvt/recorders/eastmoney/holder/__init__.py b/zvt/recorders/eastmoney/holder/__init__.py deleted file mode 100644 index 124b142e..00000000 --- a/zvt/recorders/eastmoney/holder/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- 
-from zvt.recorders.eastmoney.holder.top_ten_holder_recorder import * -from zvt.recorders.eastmoney.holder.top_ten_tradable_holder_recorder import * diff --git a/zvt/recorders/eastmoney/holder/top_ten_tradable_holder_recorder.py b/zvt/recorders/eastmoney/holder/top_ten_tradable_holder_recorder.py deleted file mode 100644 index 3bc48951..00000000 --- a/zvt/recorders/eastmoney/holder/top_ten_tradable_holder_recorder.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -from zvt.domain import TopTenTradableHolder -from zvt.recorders.eastmoney.holder.top_ten_holder_recorder import TopTenHolderRecorder - - -class TopTenTradableHolderRecorder(TopTenHolderRecorder): - provider = 'eastmoney' - data_schema = TopTenTradableHolder - - url = 'https://emh5.eastmoney.com/api/GuBenGuDong/GetShiDaLiuTongGuDong' - path_fields = ['ShiDaLiuTongGuDongList'] - timestamps_fetching_url = 'https://emh5.eastmoney.com/api/GuBenGuDong/GetFirstRequest2Data' - timestamp_list_path_fields = ['SDLTGDBGQ', 'ShiDaLiuTongGuDongBaoGaoQiList'] - timestamp_path_fields = ['BaoGaoQi'] - - -__all__ = ['TopTenTradableHolderRecorder'] - -if __name__ == '__main__': - # init_log('top_ten_tradable_holder.log') - - TopTenTradableHolderRecorder(codes=['002572']).run() diff --git a/zvt/recorders/eastmoney/meta/__init__.py b/zvt/recorders/eastmoney/meta/__init__.py deleted file mode 100644 index bb38255c..00000000 --- a/zvt/recorders/eastmoney/meta/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -from zvt.recorders.eastmoney.meta.china_stock_category_recorder import * -from zvt.recorders.eastmoney.meta.china_stock_meta_recorder import * diff --git a/zvt/recorders/eastmoney/meta/china_stock_meta_recorder.py b/zvt/recorders/eastmoney/meta/china_stock_meta_recorder.py deleted file mode 100644 index c2f0442b..00000000 --- a/zvt/recorders/eastmoney/meta/china_stock_meta_recorder.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- - -import requests - -from zvt.contract.recorder import 
Recorder -from zvt.utils.time_utils import to_pd_timestamp -from zvt.utils.utils import to_float, pct_to_float -from zvt.contract.api import get_entities -from zvt.domain.meta.stock_meta import StockDetail, Stock -from zvt.recorders.exchange.china_stock_list_spider import ExchangeChinaStockListRecorder - - -class EastmoneyChinaStockListRecorder(ExchangeChinaStockListRecorder): - data_schema = Stock - provider = 'eastmoney' - - -class EastmoneyChinaStockDetailRecorder(Recorder): - provider = 'eastmoney' - data_schema = StockDetail - - def __init__(self, batch_size=10, force_update=False, sleeping_time=5, codes=None) -> None: - super().__init__(batch_size, force_update, sleeping_time) - - # get list at first - EastmoneyChinaStockListRecorder().run() - - self.codes = codes - if not self.force_update: - self.entities = get_entities(session=self.session, - entity_type='stock_detail', - exchanges=['sh', 'sz'], - codes=self.codes, - filters=[StockDetail.profile.is_(None)], - return_type='domain', - provider=self.provider) - - def run(self): - for security_item in self.entities: - assert isinstance(security_item, StockDetail) - - if security_item.exchange == 'sh': - fc = "{}01".format(security_item.code) - if security_item.exchange == 'sz': - fc = "{}02".format(security_item.code) - - # 基本资料 - param = {"color": "w", "fc": fc, "SecurityCode": "SZ300059"} - resp = requests.post('https://emh5.eastmoney.com/api/GongSiGaiKuang/GetJiBenZiLiao', json=param) - resp.encoding = 'utf8' - - resp_json = resp.json()['Result']['JiBenZiLiao'] - - security_item.profile = resp_json['CompRofile'] - security_item.main_business = resp_json['MainBusiness'] - security_item.date_of_establishment = to_pd_timestamp(resp_json['FoundDate']) - - # 关联行业 - industries = ','.join(resp_json['Industry'].split('-')) - security_item.industries = industries - - # 关联概念 - security_item.concept_indices = resp_json['Block'] - - # 关联地区 - security_item.area_indices = resp_json['Provice'] - - self.sleep() - - # 发行相关 
- param = {"color": "w", "fc": fc} - resp = requests.post('https://emh5.eastmoney.com/api/GongSiGaiKuang/GetFaXingXiangGuan', json=param) - resp.encoding = 'utf8' - - resp_json = resp.json()['Result']['FaXingXiangGuan'] - - security_item.issue_pe = to_float(resp_json['PEIssued']) - security_item.price = to_float(resp_json['IssuePrice']) - security_item.issues = to_float(resp_json['ShareIssued']) - security_item.raising_fund = to_float((resp_json['NetCollection'])) - security_item.net_winning_rate = pct_to_float(resp_json['LotRateOn']) - - self.session.commit() - - self.logger.info('finish recording stock meta for:{}'.format(security_item.code)) - - self.sleep() - - -__all__ = ['EastmoneyChinaStockListRecorder', 'EastmoneyChinaStockDetailRecorder'] - -if __name__ == '__main__': - # init_log('china_stock_meta.log') - - # recorder = EastmoneyChinaStockDetailRecorder() - # recorder.run() - StockDetail.record_data(codes=['000338', '000777'], provider='eastmoney') diff --git a/zvt/recorders/eastmoney/quotes/__init__.py b/zvt/recorders/eastmoney/quotes/__init__.py deleted file mode 100644 index f0f87e39..00000000 --- a/zvt/recorders/eastmoney/quotes/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# -*- coding: utf-8 -*- -from zvt.recorders.eastmoney.quotes.china_stock_kdata_recorder import * diff --git a/zvt/recorders/eastmoney/quotes/china_stock_kdata_recorder.py b/zvt/recorders/eastmoney/quotes/china_stock_kdata_recorder.py deleted file mode 100644 index e6f33506..00000000 --- a/zvt/recorders/eastmoney/quotes/china_stock_kdata_recorder.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- - -import requests - -from zvt.contract import IntervalLevel -from zvt.contract.api import get_entities -from zvt.contract.api import get_db_session -from zvt.contract.recorder import FixedCycleDataRecorder -from zvt.utils.time_utils import to_pd_timestamp, now_time_str, TIME_FORMAT_DAY1 -from zvt.utils.utils import json_callback_param, to_float -from zvt.api.quote import 
from zvt.api.quote import generate_kdata_id, get_kdata_schema
from zvt.domain import Index, BlockCategory, Block


def level_flag(level: IntervalLevel):
    """Map an :class:`IntervalLevel` to eastmoney's ``klt`` query flag."""
    level = IntervalLevel(level)
    if level == IntervalLevel.LEVEL_1DAY:
        return 101
    if level == IntervalLevel.LEVEL_1WEEK:
        return 102
    if level == IntervalLevel.LEVEL_1MON:
        return 103

    # The original ``assert False`` is stripped under ``python -O`` and would
    # silently return None; raise explicitly (same exception type as before).
    raise AssertionError('unsupported level: {}'.format(level))


# Fetch daily/weekly/monthly kdata for industry/concept blocks, used for
# medium-term sector selection.
class ChinaStockKdataRecorder(FixedCycleDataRecorder):
    entity_provider: str = 'eastmoney'
    entity_schema = Block

    provider = 'eastmoney'
    url = 'https://push2his.eastmoney.com/api/qt/stock/kline/get?secid=90.{}&cb=fsdata1567673076&klt={}&fqt=0&lmt={}&end={}&iscca=1&fields1=f1%2Cf2%2Cf3%2Cf4%2Cf5&fields2=f51%2Cf52%2Cf53%2Cf54%2Cf55%2Cf56%2Cf57&ut=f057cbcbce2a86e2866ab8877db1d059&forcect=1&fsdata1567673076=fsdata1567673076'

    def __init__(self, entity_type='index', exchanges=None, entity_ids=None, codes=None, day_data=False, batch_size=10,
                 force_update=False, sleeping_time=10, default_size=10000, real_time=True, fix_duplicate_way='add',
                 start_timestamp=None, end_timestamp=None,
                 level=IntervalLevel.LEVEL_1WEEK, kdata_use_begin_time=False, close_hour=0, close_minute=0,
                 one_day_trading_minutes=24 * 60) -> None:
        # the concrete kdata schema depends on both entity type and level,
        # so it must be resolved before the base initializer runs
        self.data_schema = get_kdata_schema(entity_type=entity_type, level=level)
        super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time,
                         default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour,
                         close_minute, level, kdata_use_begin_time,
                         one_day_trading_minutes)

    def init_entities(self):
        self.entity_session = get_db_session(provider=self.entity_provider, data_schema=self.entity_schema)

        self.entities = get_entities(session=self.entity_session, entity_type='index',
                                     exchanges=self.exchanges,
                                     codes=self.codes,
                                     entity_ids=self.entity_ids,
                                     return_type='domain', provider=self.provider,
                                     # only record concept and industry blocks
                                     filters=[Index.category.in_(
                                         [BlockCategory.industry.value, BlockCategory.concept.value])])

    def record(self, entity, start, end, size, timestamps):
        """Download klines for one block and convert them to kdata dicts."""
        the_url = self.url.format("{}".format(entity.code), level_flag(self.level), size,
                                  now_time_str(fmt=TIME_FORMAT_DAY1))

        resp = requests.get(the_url)
        results = json_callback_param(resp.text)

        kdatas = []

        if results:
            klines = results['data']['klines']

            # TODO: ignore the last unfinished kdata now,could control it better if need
            for result in klines[:-1]:
                # "2000-01-28,1005.26,1012.56,1173.12,982.13,3023326,3075552000.00"
                # time,open,close,high,low,volume,turnover
                fields = result.split(',')
                the_timestamp = to_pd_timestamp(fields[0])

                the_id = generate_kdata_id(entity_id=entity.id, timestamp=the_timestamp, level=self.level)

                kdatas.append(dict(id=the_id,
                                   timestamp=the_timestamp,
                                   entity_id=entity.id,
                                   code=entity.code,
                                   name=entity.name,
                                   level=self.level.value,
                                   open=to_float(fields[1]),
                                   close=to_float(fields[2]),
                                   high=to_float(fields[3]),
                                   low=to_float(fields[4]),
                                   volume=to_float(fields[5]),
                                   turnover=to_float(fields[6])))
        return kdatas


__all__ = ['ChinaStockKdataRecorder']

if __name__ == '__main__':
    recorder = ChinaStockKdataRecorder(level=IntervalLevel.LEVEL_1MON)
    recorder.run()
# -*- coding: utf-8 -*-

import io
import re

import demjson
import pandas as pd
import requests

from zvt.contract.api import df_to_db
from zvt.contract.recorder import Recorder
from zvt.utils.time_utils import now_pd_timestamp
from zvt.api.quote import china_stock_code_to_id
from zvt.domain import EtfStock, BlockCategory, Etf
from zvt.recorders.consts import DEFAULT_SH_ETF_LIST_HEADER


class ChinaETFListSpider(Recorder):
    """Crawl the Shanghai and Shenzhen ETF lists plus each ETF's constituent
    stocks, persisting them as :class:`Etf` / :class:`EtfStock` rows."""

    data_schema = EtfStock

    def __init__(self, batch_size=10, force_update=False, sleeping_time=10.0, provider='exchange') -> None:
        # provider must be assigned before Recorder.__init__ runs
        self.provider = provider
        super().__init__(batch_size, force_update, sleeping_time)

    def run(self):
        # fetch the Shanghai ETF list (JSON endpoint)
        url = 'http://query.sse.com.cn/commonQuery.do?sqlId=COMMON_SSE_ZQPZ_ETFLB_L_NEW'
        response = requests.get(url, headers=DEFAULT_SH_ETF_LIST_HEADER)
        response_dict = demjson.decode(response.text)

        df = pd.DataFrame(response_dict.get('result', []))
        self.persist_etf_list(df, exchange='sh')
        self.logger.info('沪市 ETF 列表抓取完成...')

        # fetch Shanghai ETF constituents
        self.download_sh_etf_component(df)
        self.logger.info('沪市 ETF 成分股抓取完成...')

        # fetch the Shenzhen ETF list (served as an xlsx report)
        url = 'http://www.szse.cn/api/report/ShowReport?SHOWTYPE=xlsx&CATALOGID=1945'
        response = requests.get(url)

        df = pd.read_excel(io.BytesIO(response.content), dtype=str)
        self.persist_etf_list(df, exchange='sz')
        self.logger.info('深市 ETF 列表抓取完成...')

        # fetch Shenzhen ETF constituents
        self.download_sz_etf_component(df)
        self.logger.info('深市 ETF 成分股抓取完成...')

    def persist_etf_list(self, df: pd.DataFrame, exchange: str):
        """Normalize an exchange-specific ETF list frame and persist it as Etf."""
        if df is None:
            return

        df = df.copy()
        # each exchange names the code/name columns differently
        if exchange == 'sh':
            df = df[['FUND_ID', 'FUND_NAME']]
        elif exchange == 'sz':
            df = df[['证券代码', '证券简称']]

        df.columns = ['code', 'name']
        df['id'] = df['code'].apply(lambda code: f'etf_{exchange}_{code}')
        df['entity_id'] = df['id']
        df['exchange'] = exchange
        df['entity_type'] = 'etf'
        df['category'] = BlockCategory.etf.value

        df = df.dropna(axis=0, how='any')
        df = df.drop_duplicates(subset='id', keep='last')

        df_to_db(df=df, data_schema=Etf, provider=self.provider, force_update=False)

    def download_sh_etf_component(self, df: pd.DataFrame):
        """
        Fetch Shanghai ETF constituents.

        ETF_CLASS => 1. single-market ETF  2. cross-market ETF  3. cross-border ETF
                     5. bond ETF  6. gold ETF
        :param df: ETF list data
        :return: None
        """
        query_url = 'http://query.sse.com.cn/infodisplay/queryConstituentStockInfo.do?' \
                    'isPagination=false&type={}&etfClass={}'

        # only single-market and cross-market ETFs hold A-share constituents
        etf_df = df[(df['ETF_CLASS'] == '1') | (df['ETF_CLASS'] == '2')]
        etf_df = self.populate_sh_etf_type(etf_df)

        for _, etf in etf_df.iterrows():
            url = query_url.format(etf['ETF_TYPE'], etf['ETF_CLASS'])
            response = requests.get(url, headers=DEFAULT_SH_ETF_LIST_HEADER)
            response_dict = demjson.decode(response.text)
            response_df = pd.DataFrame(response_dict.get('result', []))

            etf_code = etf['FUND_ID']
            etf_id = f'etf_sh_{etf_code}'
            response_df = response_df[['instrumentId', 'instrumentName']].copy()
            response_df.rename(columns={'instrumentId': 'stock_code', 'instrumentName': 'stock_name'}, inplace=True)

            response_df['entity_id'] = etf_id
            response_df['entity_type'] = 'etf'
            response_df['exchange'] = 'sh'
            response_df['code'] = etf_code
            response_df['name'] = etf['FUND_NAME']
            response_df['timestamp'] = now_pd_timestamp()

            response_df['stock_id'] = response_df['stock_code'].apply(lambda code: china_stock_code_to_id(code))
            response_df['id'] = response_df['stock_id'].apply(
                lambda x: f'{etf_id}_{x}')

            df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider)
            self.logger.info(f'{etf["FUND_NAME"]} - {etf_code} 成分股抓取完成...')

            self.sleep()

    def download_sz_etf_component(self, df: pd.DataFrame):
        """Fetch Shenzhen ETF constituents via the tracked index's page on sina."""
        query_url = 'http://vip.stock.finance.sina.com.cn/corp/go.php/vII_NewestComponent/indexid/{}.phtml'

        self.parse_sz_etf_underlying_index(df)
        for _, etf in df.iterrows():
            underlying_index = etf['拟合指数']
            etf_code = etf['证券代码']

            if len(underlying_index) == 0:
                # ETFs tracking non-A-share indices carry no parsable index code
                self.logger.info(f'{etf["证券简称"]} - {etf_code} 非 A 股市场指数,跳过...')
                continue

            url = query_url.format(underlying_index)
            response = requests.get(url)
            response.encoding = 'gbk'

            try:
                dfs = pd.read_html(response.text, header=1)
            except ValueError as error:
                self.logger.error(f'HTML parse error: {error}, response: {response.text}')
                continue

            if len(dfs) < 4:
                continue

            # the 4th table on the page holds the constituent list
            response_df = dfs[3].copy()
            response_df = response_df.dropna(axis=1, how='any')
            # zero-pad codes back to 6 digits (read_html parses them as ints)
            response_df['品种代码'] = response_df['品种代码'].apply(lambda x: f'{x:06d}')

            etf_id = f'etf_sz_{etf_code}'
            response_df = response_df[['品种代码', '品种名称']].copy()
            response_df.rename(columns={'品种代码': 'stock_code', '品种名称': 'stock_name'}, inplace=True)

            response_df['entity_id'] = etf_id
            response_df['entity_type'] = 'etf'
            response_df['exchange'] = 'sz'
            response_df['code'] = etf_code
            response_df['name'] = etf['证券简称']
            response_df['timestamp'] = now_pd_timestamp()

            response_df['stock_id'] = response_df['stock_code'].apply(lambda code: china_stock_code_to_id(code))
            response_df['id'] = response_df['stock_id'].apply(
                lambda x: f'{etf_id}_{x}')

            df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider)
            self.logger.info(f'{etf["证券简称"]} - {etf_code} 成分股抓取完成...')

            self.sleep()

    @staticmethod
    def populate_sh_etf_type(df: pd.DataFrame):
        """
        Fill each Shanghai ETF code's TYPE into the list data.
        :param df: ETF list data
        :return: list data including each ETF's TYPE
        """
        query_url = 'http://query.sse.com.cn/infodisplay/queryETFNewAllInfo.do?' \
                    'isPagination=false&type={}&pageHelp.pageSize=25'

        type_df = pd.DataFrame()
        for etf_class in [1, 2]:
            url = query_url.format(etf_class)
            response = requests.get(url, headers=DEFAULT_SH_ETF_LIST_HEADER)
            response_dict = demjson.decode(response.text)
            response_df = pd.DataFrame(response_dict.get('result', []))
            response_df = response_df[['fundid1', 'etftype']]

            type_df = pd.concat([type_df, response_df])

        result_df = df.copy()
        # NOTE(review): the frames are aligned positionally after sorting by
        # code — this assumes both sides hold exactly the same codes; confirm.
        result_df = result_df.sort_values(by='FUND_ID').reset_index(drop=True)
        type_df = type_df.sort_values(by='fundid1').reset_index(drop=True)

        result_df['ETF_TYPE'] = type_df['etftype']

        return result_df

    @staticmethod
    def parse_sz_etf_underlying_index(df: pd.DataFrame):
        """
        Parse the tracked-index code for each Shenzhen ETF, rewriting the
        '拟合指数' column in place.
        :param df: ETF list data
        :return: None (the frame is modified in place)
        """

        def parse_index(text):
            # keep the leading digit run; anything else means "no A-share index"
            if len(text) == 0:
                return ''

            result = re.search(r"(\d+).*", text)
            if result is None:
                return ''
            else:
                return result.group(1)

        df['拟合指数'] = df['拟合指数'].apply(parse_index)


__all__ = ['ChinaETFListSpider']

if __name__ == '__main__':
    spider = ChinaETFListSpider(provider='exchange')
    spider.run()
# -*- coding: utf-8 -*-

import io

import demjson
import pandas as pd
import requests

from zvt.contract.api import df_to_db
from zvt.contract.recorder import Recorder
from zvt.utils.time_utils import to_pd_timestamp, now_pd_timestamp
from zvt.api.quote import china_stock_code_to_id
from zvt.domain import IndexStock, Index


class ChinaIndexListSpider(Recorder):
    """Crawl Chinese index lists (SSE/CSI, SZSE, CNI) and their constituents."""

    data_schema = IndexStock

    def __init__(self, batch_size=10, force_update=False, sleeping_time=2.0, provider='exchange') -> None:
        # provider must be assigned before Recorder.__init__ runs
        self.provider = provider
        super().__init__(batch_size, force_update, sleeping_time)

    def run(self):
        # SSE / CSI indices
        self.fetch_csi_index()

        # SZSE indices
        self.fetch_szse_index()

        # CNI indices
        # FIXME: endpoint no longer available
        # self.fetch_cni_index()

    def fetch_csi_index(self) -> None:
        """Fetch the SSE/CSI index list, page by page."""
        url = 'http://www.csindex.com.cn/zh-CN/indices/index' \
              '?page={}&page_size={}&data_type=json&class_1=1&class_2=2&class_7=7&class_10=10'

        index_list = []
        page = 1
        page_size = 50
        while True:
            query_url = url.format(page, page_size)
            response = requests.get(query_url)
            response_dict = demjson.decode(response.text)
            response_index_list = response_dict.get('list', [])

            # an empty page marks the end of pagination
            if len(response_index_list) == 0:
                break

            index_list.extend(response_index_list)

            self.logger.info(f'上证、中证指数第 {page} 页抓取完成...')
            page += 1
            self.sleep()

        df = pd.DataFrame(index_list)
        df = df[['base_date', 'base_point', 'index_code', 'indx_sname', 'online_date', 'class_eseries']].copy()
        df.columns = ['timestamp', 'base_point', 'code', 'name', 'list_date', 'class_eseries']
        df['category'] = df['class_eseries'].apply(lambda x: x.split(' ')[0].lower())
        df = df.drop('class_eseries', axis=1)
        # keep only 6-digit codes (drops non-standard entries)
        df = df.loc[df['code'].str.contains(r'^\d{6}$')]

        self.persist_index(df)
        self.logger.info('上证、中证指数列表抓取完成...')

        # fetch SSE/CSI index constituents
        self.fetch_csi_index_component(df)
        self.logger.info('上证、中证指数成分股抓取完成...')

    def fetch_csi_index_component(self, df: pd.DataFrame):
        """Fetch SSE/CSI index constituents from the per-index xls file."""
        query_url = 'http://www.csindex.com.cn/uploads/file/autofile/cons/{}cons.xls'

        for _, index in df.iterrows():
            index_code = index['code']

            url = query_url.format(index_code)

            try:
                response = requests.get(url)
                response.raise_for_status()
            except requests.HTTPError as error:
                self.logger.error(f'{index["name"]} - {index_code} 成分股抓取错误 ({error})')
                continue

            response_df = pd.read_excel(io.BytesIO(response.content))

            response_df = response_df[['成分券代码Constituent Code', '成分券名称Constituent Name']].rename(
                columns={'成分券代码Constituent Code': 'stock_code',
                         '成分券名称Constituent Name': 'stock_name'})

            index_id = f'index_cn_{index_code}'
            response_df['entity_id'] = index_id
            response_df['entity_type'] = 'index'
            response_df['exchange'] = 'cn'
            response_df['code'] = index_code
            response_df['name'] = index['name']
            response_df['timestamp'] = now_pd_timestamp()

            response_df['stock_id'] = response_df['stock_code'].apply(lambda x: china_stock_code_to_id(str(x)))
            response_df['id'] = response_df['stock_id'].apply(
                lambda x: f'{index_id}_{x}')

            df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider, force_update=True)
            self.logger.info(f'{index["name"]} - {index_code} 成分股抓取完成...')

            self.sleep()

    def fetch_szse_index(self) -> None:
        """Fetch the SZSE index list (xlsx report)."""
        url = 'http://www.szse.cn/api/report/ShowReport?SHOWTYPE=xlsx&CATALOGID=1812_zs&TABKEY=tab1'
        response = requests.get(url)
        df = pd.read_excel(io.BytesIO(response.content), dtype='str')

        df.columns = ['code', 'name', 'timestamp', 'base_point', 'list_date']
        df['category'] = 'szse'
        df = df.loc[df['code'].str.contains(r'^\d{6}$')]
        self.persist_index(df)
        self.logger.info('深证指数列表抓取完成...')

        # fetch SZSE index constituents
        self.fetch_szse_index_component(df)
        self.logger.info('深证指数成分股抓取完成...')

    def fetch_szse_index_component(self, df: pd.DataFrame):
        """Fetch SZSE index constituents from the per-index xlsx report."""
        query_url = 'http://www.szse.cn/api/report/ShowReport?SHOWTYPE=xlsx&CATALOGID=1747_zs&TABKEY=tab1&ZSDM={}'

        for _, index in df.iterrows():
            index_code = index['code']

            url = query_url.format(index_code)
            response = requests.get(url)

            response_df = pd.read_excel(io.BytesIO(response.content), dtype='str')

            index_id = f'index_cn_{index_code}'
            response_df['entity_id'] = index_id
            response_df['entity_type'] = 'index'
            response_df['exchange'] = 'cn'
            response_df['code'] = index_code
            response_df['name'] = index['name']
            response_df['timestamp'] = now_pd_timestamp()

            response_df.rename(columns={'证券代码': 'stock_code', '证券简称': 'stock_name'}, inplace=True)
            response_df['stock_id'] = response_df['stock_code'].apply(lambda x: china_stock_code_to_id(str(x)))

            response_df['id'] = response_df['stock_id'].apply(
                lambda x: f'{index_id}_{x}')

            df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider)
            self.logger.info(f'{index["name"]} - {index_code} 成分股抓取完成...')

            self.sleep()

    def fetch_cni_index(self) -> None:
        """Fetch the CNI (国证) index list by scraping the HTML tables."""
        url = 'http://www.cnindex.com.cn/zstx/jcxl/'
        response = requests.get(url)
        response.encoding = 'utf-8'
        dfs = pd.read_html(response.text)

        # tables from the 9th onwards are non-stock indices
        dfs = dfs[1:9]

        result_df = pd.DataFrame()
        for df in dfs:
            # the first row of each scraped table is its real header
            header = df.iloc[0]
            df = df[1:]
            df.columns = header
            # bug fix: ``df.astype('str')`` returns a new frame — the original
            # discarded it, leaving mixed dtypes; assign it back
            df = df.astype('str')

            result_df = pd.concat([result_df, df])

        result_df = result_df.drop('样本股数量', axis=1)
        result_df.columns = ['name', 'code', 'timestamp', 'base_point', 'list_date']
        result_df['timestamp'] = result_df['timestamp'].apply(lambda x: x.replace('-', ''))
        result_df['list_date'] = result_df['list_date'].apply(lambda x: x.replace('-', ''))
        result_df['category'] = 'csi'
        result_df = result_df.loc[result_df['code'].str.contains(r'^\d{6}$')]

        self.persist_index(result_df)
        self.logger.info('国证指数列表抓取完成...')

        # fetch CNI index constituents
        self.fetch_cni_index_component(result_df)
        self.logger.info('国证指数成分股抓取完成...')

    def fetch_cni_index_component(self, df: pd.DataFrame):
        """Fetch CNI index constituents from the per-index xls file."""
        query_url = 'http://www.cnindex.com.cn/docs/yb_{}.xls'

        for _, index in df.iterrows():
            index_code = index['code']

            url = query_url.format(index_code)

            try:
                response = requests.get(url)
                response.raise_for_status()
            except requests.HTTPError as error:
                self.logger.error(f'{index["name"]} - {index_code} 成分股抓取错误 ({error})')
                continue

            response_df = pd.read_excel(io.BytesIO(response.content), dtype='str')

            index_id = f'index_cn_{index_code}'

            # column naming differs between files
            try:
                response_df = response_df[['样本股代码']]
            except KeyError:
                response_df = response_df[['证券代码']]

            response_df['entity_id'] = index_id
            response_df['entity_type'] = 'index'
            response_df['exchange'] = 'cn'
            response_df['code'] = index_code
            response_df['name'] = index['name']
            response_df['timestamp'] = now_pd_timestamp()

            response_df.columns = ['stock_code']
            response_df['stock_id'] = response_df['stock_code'].apply(lambda x: china_stock_code_to_id(str(x)))
            response_df['id'] = response_df['stock_id'].apply(
                lambda x: f'{index_id}_{x}')

            df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider)
            self.logger.info(f'{index["name"]} - {index_code} 成分股抓取完成...')

            self.sleep()

    def persist_index(self, df) -> None:
        """Normalize a raw index-list frame and persist it as Index rows."""
        df['timestamp'] = df['timestamp'].apply(lambda x: to_pd_timestamp(x))
        df['list_date'] = df['list_date'].apply(lambda x: to_pd_timestamp(x))
        df['id'] = df['code'].apply(lambda code: f'index_cn_{code}')
        df['entity_id'] = df['id']
        df['exchange'] = 'cn'
        df['entity_type'] = 'index'

        df = df.dropna(axis=0, how='any')
        df = df.drop_duplicates(subset='id', keep='last')

        df_to_db(df=df, data_schema=Index, provider=self.provider, force_update=False)


__all__ = ['ChinaIndexListSpider']

if __name__ == '__main__':
    spider = ChinaIndexListSpider(provider='exchange')
    spider.run()
# -*- coding: utf-8 -*-

import io

import pandas as pd
import requests

from zvt.contract.api import df_to_db
from zvt.contract.recorder import Recorder
from zvt.domain import Stock, StockDetail
from zvt.recorders.consts import DEFAULT_SH_HEADER, DEFAULT_SZ_HEADER
from zvt.utils.time_utils import to_pd_timestamp


class ExchangeChinaStockListRecorder(Recorder):
    """Download the A-share stock lists published by the SSE and SZSE sites."""
    data_schema = Stock
    provider = 'exchange'

    def run(self):
        # Shanghai list (csv download)
        url = 'http://query.sse.com.cn/security/stock/downloadStockListFile.do?csrcCode=&stockCode=&areaName=&stockType=1'

        resp = requests.get(url, headers=DEFAULT_SH_HEADER)
        self.download_stock_list(response=resp, exchange='sh')

        # Shenzhen list (xlsx report)
        url = 'http://www.szse.cn/api/report/ShowReport?SHOWTYPE=xlsx&CATALOGID=1110&TABKEY=tab1&random=0.20932135244582617'

        resp = requests.get(url, headers=DEFAULT_SZ_HEADER)
        self.download_stock_list(response=resp, exchange='sz')

    def download_stock_list(self, response, exchange):
        """Parse one exchange's stock list response and persist it.

        :param response: the raw HTTP response (csv for sh, xlsx for sz)
        :param exchange: 'sh' or 'sz'
        """
        df = None
        if exchange == 'sh':
            df = pd.read_csv(io.BytesIO(response.content), sep='\s+', encoding='GB2312', dtype=str,
                             parse_dates=['上市日期'], error_bad_lines=False)
            if df is not None:
                df = df.loc[:, ['公司代码', '公司简称', '上市日期']]

        elif exchange == 'sz':
            df = pd.read_excel(io.BytesIO(response.content), sheet_name='A股列表', dtype=str, parse_dates=['A股上市日期'])
            if df is not None:
                df = df.loc[:, ['A股代码', 'A股简称', 'A股上市日期']]

        if df is not None:
            df.columns = ['code', 'name', 'list_date']

            df = df.dropna(subset=['code'])

            # handle the dirty data
            # 600996,贵广网络,2016-12-26,2016-12-26,sh,stock,stock_sh_600996,,次新股,贵州,,
            df.loc[df['code'] == '600996', 'list_date'] = '2016-12-26'
            # rows with a '-' list_date are dirty; surface them in the log
            # instead of the leftover debug ``print`` the original used
            self.logger.debug(df[df['list_date'] == '-'])
            df['list_date'] = df['list_date'].apply(lambda x: to_pd_timestamp(x))
            df['exchange'] = exchange
            df['entity_type'] = 'stock'
            df['id'] = df[['entity_type', 'exchange', 'code']].apply(lambda x: '_'.join(x.astype(str)), axis=1)
            df['entity_id'] = df['id']
            df['timestamp'] = df['list_date']
            df = df.dropna(axis=0, how='any')
            # original used ``subset=('id')`` — a plain string in disguise
            df = df.drop_duplicates(subset=['id'], keep='last')
            df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=False)
            # persist StockDetail too
            df_to_db(df=df, data_schema=StockDetail, provider=self.provider, force_update=False)
            self.logger.info(df.tail())
            self.logger.info("persist stock list successs")


__all__ = ['ExchangeChinaStockListRecorder']

if __name__ == '__main__':
    spider = ExchangeChinaStockListRecorder()
    spider.run()
有些较老的数据不存在,默认设为0.0 - json_results.append({ - 'provider': 'exchange', - 'timestamp': timestamp, - 'name': '上证指数', - 'pe': to_float(result_json['profitRate'], 0.0), - 'total_value': to_float(result_json['marketValue1'] + '亿', 0.0), - 'total_tradable_vaule': to_float(result_json['negotiableValue1'] + '亿', 0.0), - 'volume': to_float(result_json['trdVol1'] + '万', 0.0), - 'turnover': to_float(result_json['trdAmt1'] + '亿', 0.0), - 'turnover_rate': to_float(result_json['exchangeRate'], 0.0), - }) - - if len(json_results) > self.batch_size: - return json_results - - return json_results - - def get_data_map(self): - return None - - -if __name__ == '__main__': - StockSummaryRecorder(batch_size=30).run() diff --git a/zvt/recorders/joinquant/fundamental/__init__.py b/zvt/recorders/joinquant/fundamental/__init__.py deleted file mode 100644 index f8663686..00000000 --- a/zvt/recorders/joinquant/fundamental/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# the __all__ is generated -__all__ = [] - -# __init__.py structure: -# common code of the package -# export interface in __all__ which contains __all__ of its sub modules - -# import all from submodule stock_valuation_recorder -from .stock_valuation_recorder import * -from .stock_valuation_recorder import __all__ as _stock_valuation_recorder_all -__all__ += _stock_valuation_recorder_all - -# import all from submodule joinquant_margin_trading_recorder -from .joinquant_margin_trading_recorder import * -from .joinquant_margin_trading_recorder import __all__ as _joinquant_margin_trading_recorder_all -__all__ += _joinquant_margin_trading_recorder_all - -# import all from submodule etf_valuation_recorder -from .etf_valuation_recorder import * -from .etf_valuation_recorder import __all__ as _etf_valuation_recorder_all -__all__ += _etf_valuation_recorder_all \ No newline at end of file diff --git a/zvt/recorders/joinquant/fundamental/joinquant_margin_trading_recorder.py 
b/zvt/recorders/joinquant/fundamental/joinquant_margin_trading_recorder.py deleted file mode 100644 index 7133d53a..00000000 --- a/zvt/recorders/joinquant/fundamental/joinquant_margin_trading_recorder.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- - -import pandas as pd -from jqdatapy.api import get_mtss - -from zvt.contract.api import df_to_db -from zvt.contract.recorder import TimeSeriesDataRecorder -from zvt.domain import Stock, MarginTrading -from zvt.recorders.joinquant.common import to_jq_entity_id -from zvt.utils.pd_utils import pd_is_not_null -from zvt.utils.time_utils import to_time_str, TIME_FORMAT_DAY - - -class MarginTradingRecorder(TimeSeriesDataRecorder): - entity_provider = 'joinquant' - entity_schema = Stock - - # 数据来自jq - provider = 'joinquant' - - data_schema = MarginTrading - - def __init__(self, entity_type='stock', exchanges=None, entity_ids=None, codes=None, day_data=True, batch_size=10, - force_update=False, sleeping_time=5, default_size=2000, real_time=False, fix_duplicate_way='add', - start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0) -> None: - super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute) - - def record(self, entity, start, end, size, timestamps): - df = get_mtss(code=to_jq_entity_id(entity), date=to_time_str(start)) - - if pd_is_not_null(df): - df['entity_id'] = entity.id - df['code'] = entity.code - df.rename(columns={'date': 'timestamp'}, inplace=True) - df['timestamp'] = pd.to_datetime(df['timestamp']) - df['id'] = df[['entity_id', 'timestamp']].apply( - lambda se: "{}_{}".format(se['entity_id'], to_time_str(se['timestamp'], fmt=TIME_FORMAT_DAY)), axis=1) - - print(df) - df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) - - return None - - -if __name__ == '__main__': - 
# -*- coding: utf-8 -*-

import pandas as pd
from jqdatapy.api import get_fundamentals
from pandas._libs.tslibs.timedeltas import Timedelta

from zvt.contract.api import df_to_db
from zvt.contract.recorder import TimeSeriesDataRecorder
from zvt.domain import Stock, StockValuation, Etf
from zvt.recorders.joinquant.common import to_jq_entity_id
from zvt.utils.time_utils import now_pd_timestamp, to_time_str, to_pd_timestamp


class JqChinaStockValuationRecorder(TimeSeriesDataRecorder):
    """Record per-stock daily valuation data (pe/pb/ps/pcf, caps) from joinquant."""
    entity_provider = 'joinquant'
    entity_schema = Stock

    # data comes from jq
    provider = 'joinquant'

    data_schema = StockValuation

    def __init__(self, entity_type='stock', exchanges=None, entity_ids=None, codes=None, day_data=True, batch_size=10,
                 force_update=False, sleeping_time=5, default_size=2000, real_time=False, fix_duplicate_way='add',
                 start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0) -> None:
        super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time,
                         default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour,
                         close_minute)

    def record(self, entity, start, end, size, timestamps):
        """Fetch up to 500 days of valuation rows ending at ``end``."""
        # jq data starts in 2005; clamp the window to at most 500 days
        start = max(start, to_pd_timestamp('2005-01-01'))
        end = min(now_pd_timestamp(), start + Timedelta(days=500))

        count: Timedelta = end - start

        # df = get_fundamentals_continuously(q, end_date=now_time_str(), count=count.days + 1, panel=False)
        df = get_fundamentals(table='valuation', code=to_jq_entity_id(entity), date=to_time_str(end),
                              count=min(count.days, 500))
        df['entity_id'] = entity.id
        df['timestamp'] = pd.to_datetime(df['day'])
        df['code'] = entity.code
        df['name'] = entity.name
        df['id'] = df['timestamp'].apply(lambda x: "{}_{}".format(entity.id, to_time_str(x)))
        # map jq column names onto the StockValuation schema
        df = df.rename({'pe_ratio_lyr': 'pe',
                        'pe_ratio': 'pe_ttm',
                        'pb_ratio': 'pb',
                        'ps_ratio': 'ps',
                        'pcf_ratio': 'pcf'},
                       axis='columns')

        # jq reports caps in 亿 (1e8 CNY) and share counts in 万 (1e4);
        # normalize to absolute units, and turnover ratio from percent to fraction
        df['market_cap'] = df['market_cap'] * 100000000
        df['circulating_market_cap'] = df['circulating_market_cap'] * 100000000
        df['capitalization'] = df['capitalization'] * 10000
        df['circulating_cap'] = df['circulating_cap'] * 10000
        df['turnover_ratio'] = df['turnover_ratio'] * 0.01
        df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update)

        return None


if __name__ == '__main__':
    # SSE 50 constituents
    df = Etf.get_stocks(code='510050')
    stocks = df.stock_id.tolist()
    print(stocks)
    print(len(stocks))

    JqChinaStockValuationRecorder(entity_ids=['stock_sz_300999'], force_update=True).run()
# the __all__ is generated
__all__ = ['JqChinaStockValuationRecorder']
# -*- coding: utf-8 -*-
import pandas as pd
from jqdatapy.api import run_query

from zvt.api import portfolio_relate_stock, china_stock_code_to_id
from zvt.contract.api import df_to_db
from zvt.contract.recorder import Recorder, TimeSeriesDataRecorder
from zvt.domain.meta.fund_meta import Fund, FundStock
from zvt.recorders.joinquant.common import to_entity_id, jq_to_report_period
from zvt.utils.pd_utils import pd_is_not_null
from zvt.utils.time_utils import to_time_str, next_date, now_pd_timestamp, is_same_date


class JqChinaFundRecorder(Recorder):
    """Record the China fund list (open-end, closed-end, ETF) from joinquant."""
    provider = 'joinquant'
    data_schema = Fund

    def run(self):
        # fetch by operate mode:
        # 401001 open-end fund
        # 401002 closed-end fund
        # 401003 QDII
        # 401004 FOF
        # 401005 ETF
        # 401006 LOF
        for operate_mode_id in (401001, 401002, 401005):
            year_count = 2
            while True:
                # resume from the latest recorded timestamp of this mode
                latest = Fund.query_data(filters=[Fund.operate_mode_id == operate_mode_id], order=Fund.timestamp.desc(),
                                         limit=1, return_type='domain')
                start_timestamp = '2000-01-01'
                if latest:
                    start_timestamp = latest[0].timestamp

                end_timestamp = min(next_date(start_timestamp, 365 * year_count), now_pd_timestamp())

                df = run_query(table='finance.FUND_MAIN_INFO',
                               conditions=f'operate_mode_id#=#{operate_mode_id}&start_date#>=#{to_time_str(start_timestamp)}&start_date#<=#{to_time_str(end_timestamp)}',
                               parse_dates=['start_date', 'end_date'],
                               dtype={'main_code': str})
                # widen the window when the batch did not reach the end year
                if not pd_is_not_null(df) or (df['start_date'].max().year < end_timestamp.year):
                    year_count = year_count + 1

                if pd_is_not_null(df):
                    df.rename(columns={'start_date': 'timestamp'}, inplace=True)
                    df['timestamp'] = pd.to_datetime(df['timestamp'])
                    df['list_date'] = df['timestamp']
                    df['end_date'] = pd.to_datetime(df['end_date'])

                    df['code'] = df['main_code']
                    df['entity_id'] = df['code'].apply(lambda x: to_entity_id(entity_type='fund', jq_code=x))
                    df['id'] = df['entity_id']
                    df['entity_type'] = 'fund'
                    df['exchange'] = 'sz'
                    df_to_db(df, data_schema=Fund, provider=self.provider, force_update=self.force_update)
                    self.logger.info(
                        f'persist fund {operate_mode_id} list success {start_timestamp} to {end_timestamp}')

                if is_same_date(end_timestamp, now_pd_timestamp()):
                    break


class JqChinaFundStockRecorder(TimeSeriesDataRecorder):
    """Record fund portfolio holdings (constituent stocks) from joinquant."""
    entity_provider = 'joinquant'
    entity_schema = Fund

    provider = 'joinquant'
    data_schema = FundStock

    def __init__(self, entity_ids=None, codes=None, day_data=True, batch_size=10,
                 force_update=False, sleeping_time=5, default_size=2000, real_time=False, fix_duplicate_way='add',
                 start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0) -> None:
        super().__init__('fund', ['sh', 'sz'], entity_ids, codes, day_data, batch_size, force_update, sleeping_time,
                         default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour,
                         close_minute)

    def init_entities(self):
        # only stock/hybrid funds that have not been delisted
        self.entities = Fund.query_data(
            entity_ids=self.entity_ids,
            codes=self.codes,
            return_type='domain',
            provider=self.entity_provider,
            filters=[Fund.underlying_asset_type.in_(('股票型', '混合型')), Fund.end_date.is_(None)])

    def record(self, entity, start, end, size, timestamps):
        # skip delisted funds
        if entity.end_date:
            return None
        redundant_times = 1
        while redundant_times > 0:
            df = run_query(table='finance.FUND_PORTFOLIO_STOCK',
                           conditions=f'pub_date#>=#{to_time_str(start)}&code#=#{entity.code}',
                           parse_dates=None)
            df = df.dropna()
            if pd_is_not_null(df):
                # data format
                #          id      code period_start  period_end    pub_date  report_type_id report_type  rank  symbol  name      shares    market_cap  proportion
                # 0   8640569    159919   2018-07-01  2018-09-30  2018-10-26          403003        第三季度     1  601318  中国平安  19869239.0  1.361043e+09        7.09
                # 1   8640570    159919   2018-07-01  2018-09-30  2018-10-26          403003        第三季度     2  600519  贵州茅台    921670.0  6.728191e+08        3.50
                # 2   8640571    159919   2018-07-01  2018-09-30  2018-10-26          403003        第三季度     3  600036  招商银行  18918815.0  5.806184e+08        3.02
                # 3   8640572    159919   2018-07-01  2018-09-30  2018-10-26          403003        第三季度     4  601166  兴业银行  22862332.0  3.646542e+08        1.90
                df['timestamp'] = pd.to_datetime(df['pub_date'])

                df.rename(columns={'symbol': 'stock_code', 'name': 'stock_name'}, inplace=True)
                df['proportion'] = df['proportion'] * 0.01

                df = portfolio_relate_stock(df, entity)

                df['stock_id'] = df['stock_code'].apply(lambda x: china_stock_code_to_id(x))
                df['id'] = df[['entity_id', 'stock_id', 'pub_date', 'id']].apply(lambda x: '_'.join(x.astype(str)),
                                                                                 axis=1)
                df['report_date'] = pd.to_datetime(df['period_end'])
                df['report_period'] = df['report_type'].apply(lambda x: jq_to_report_period(x))

                saved = df_to_db(df=df, data_schema=self.data_schema, provider=self.provider,
                                 force_update=self.force_update)

                # nothing new could be saved
                if saved == 0:
                    return None

                # self.logger.info(df.tail())
                self.logger.info(
                    f"persist fund {entity.code}({entity.name}) portfolio success {df.iloc[-1]['pub_date']}")
                latest = df['timestamp'].max()

                # data within the last two years: query once more to make sure
                # the newest publications are fully captured
                if latest.year >= now_pd_timestamp().year - 1:
                    redundant_times = redundant_times - 1
                    start = latest
                else:
                    return None
            else:
                # bug fix: the original looped forever here — an empty result
                # left ``start`` and ``redundant_times`` unchanged, so the same
                # empty query was issued again and again.  No data means done.
                return None

        return None


if __name__ == '__main__':
    # JqChinaFundRecorder().run()
    JqChinaFundStockRecorder(codes=['000053']).run()
# the __all__ is generated
__all__ = ['JqChinaFundRecorder', 'JqChinaFundStockRecorder']
b/zvt/recorders/joinquant/meta/stock_trade_day_recorder.py deleted file mode 100644 index 1c4ec268..00000000 --- a/zvt/recorders/joinquant/meta/stock_trade_day_recorder.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -import pandas as pd -from jqdatapy.api import get_trade_days - -from zvt.contract.api import df_to_db -from zvt.contract.recorder import TimeSeriesDataRecorder -from zvt.domain import StockTradeDay, Stock -from zvt.utils.time_utils import to_time_str - - -class StockTradeDayRecorder(TimeSeriesDataRecorder): - entity_provider = 'joinquant' - entity_schema = Stock - - provider = 'joinquant' - data_schema = StockTradeDay - - def __init__(self, entity_type='stock', exchanges=['sh', 'sz'], entity_ids=None, codes=None, day_data=False, - batch_size=10, force_update=False, sleeping_time=5, default_size=2000, real_time=False, - fix_duplicate_way='add', start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0, - entity_filters=None) -> None: - super().__init__(entity_type, exchanges, entity_ids, ['000001'], day_data, batch_size, force_update, - sleeping_time, default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, - close_hour, close_minute, entity_filters) - - def record(self, entity, start, end, size, timestamps): - df = pd.DataFrame() - dates = get_trade_days(date=to_time_str(start)) - dates = dates.iloc[:, 0] - self.logger.info(f'add dates:{dates}') - df['timestamp'] = pd.to_datetime(dates) - df['id'] = [to_time_str(date) for date in dates] - df['entity_id'] = 'stock_sz_000001' - - df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) - - -if __name__ == '__main__': - r = StockTradeDayRecorder() - r.run() -# the __all__ is generated -__all__ = ['StockTradeDayRecorder'] diff --git a/zvt/recorders/joinquant/misc/__init__.py b/zvt/recorders/joinquant/misc/__init__.py deleted file mode 100644 index 14fa4edc..00000000 --- a/zvt/recorders/joinquant/misc/__init__.py +++ 
/dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# the __all__ is generated -__all__ = [] - -# __init__.py structure: -# common code of the package -# export interface in __all__ which contains __all__ of its sub modules - -# import all from submodule joinquant_hk_holder_recorder -from .joinquant_hk_holder_recorder import * -from .joinquant_hk_holder_recorder import __all__ as _joinquant_hk_holder_recorder_all -__all__ += _joinquant_hk_holder_recorder_all - -# import all from submodule joinquant_index_money_flow_recorder -from .joinquant_index_money_flow_recorder import * -from .joinquant_index_money_flow_recorder import __all__ as _joinquant_index_money_flow_recorder_all -__all__ += _joinquant_index_money_flow_recorder_all - -# import all from submodule joinquant_stock_money_flow_recorder -from .joinquant_stock_money_flow_recorder import * -from .joinquant_stock_money_flow_recorder import __all__ as _joinquant_stock_money_flow_recorder_all -__all__ += _joinquant_stock_money_flow_recorder_all \ No newline at end of file diff --git a/zvt/recorders/joinquant/misc/joinquant_hk_holder_recorder.py b/zvt/recorders/joinquant/misc/joinquant_hk_holder_recorder.py deleted file mode 100644 index 28cd159e..00000000 --- a/zvt/recorders/joinquant/misc/joinquant_hk_holder_recorder.py +++ /dev/null @@ -1,87 +0,0 @@ -import pandas as pd -from jqdatapy.api import run_query - -from zvt.contract.api import df_to_db, get_data -from zvt.contract.recorder import TimestampsDataRecorder -from zvt.domain import Index -from zvt.domain.misc.holder import HkHolder -from zvt.recorders.joinquant.common import to_entity_id -from zvt.utils.pd_utils import pd_is_not_null -from zvt.utils.time_utils import to_time_str, TIME_FORMAT_DAY, to_pd_timestamp - - -# 这里选择继承TimestampsDataRecorder是因为 -# 1)时间上就是交易日的列表,这个是可知的,可以以此为增量计算点 -# 2)HkHolder数据结构的设计: -# 沪股通/深股通 每日 持有 标的(股票)的情况 -# 抓取的角度是entity从Index中获取 沪股通/深股通,然后按 每日 去获取 - -class JoinquantHkHolderRecorder(TimestampsDataRecorder): - entity_provider = 
'joinquant' - entity_schema = Index - - provider = 'joinquant' - data_schema = HkHolder - - def __init__(self, day_data=False, - force_update=False, sleeping_time=5, default_size=2000, real_time=False, - start_timestamp=None, end_timestamp=None) -> None: - # 聚宽编码 - # 市场通编码 市场通名称 - # 310001 沪股通 - # 310002 深股通 - # 310003 港股通(沪) - # 310004 港股通(深) - codes = ['310001', '310002'] - - super().__init__('index', ['sh', 'sz'], None, codes, day_data, 10, force_update, sleeping_time, - default_size, real_time, 'ignore', start_timestamp, end_timestamp, 0, 0) - - def init_timestamps(self, entity): - # 聚宽数据从2017年3月17开始 - return pd.date_range(start=to_pd_timestamp('2017-3-17'), - end=pd.Timestamp.now(), - freq='B').tolist() - - # 覆盖这个方式是因为,HkHolder里面entity其实是股票,而recorder中entity是 Index类型(沪股通/深股通) - def get_latest_saved_record(self, entity): - order = eval('self.data_schema.{}.desc()'.format(self.get_evaluated_time_field())) - - records = get_data(filters=[HkHolder.holder_code == entity.code], - provider=self.provider, - data_schema=self.data_schema, - order=order, - limit=1, - return_type='domain', - session=self.session) - if records: - return records[0] - return None - - def record(self, entity, start, end, size, timestamps): - for timestamp in timestamps: - df = run_query(table='finance.STK_HK_HOLD_INFO', - conditions=f'link_id#=#{entity.code}&day#=#{to_time_str(timestamp)}') - print(df) - - if pd_is_not_null(df): - df.rename(columns={'day': 'timestamp', 'link_id': 'holder_code', 'link_name': 'holder_name'}, - inplace=True) - df['timestamp'] = pd.to_datetime(df['timestamp']) - - df['entity_id'] = df['code'].apply(lambda x: to_entity_id(entity_type='stock', jq_code=x)) - df['code'] = df['code'].apply(lambda x: x.split('.')[0]) - - # id格式为:{holder_name}_{entity_id}_{timestamp} - df['id'] = df[['holder_name', 'entity_id', 'timestamp']].apply( - lambda se: "{}_{}_{}".format(se['holder_name'], se['entity_id'], - to_time_str(se['timestamp'], fmt=TIME_FORMAT_DAY)), - axis=1) - - 
df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) - - -if __name__ == '__main__': - JoinquantHkHolderRecorder(sleeping_time=10).run() -# the __all__ is generated -__all__ = ['JoinquantHkHolderRecorder'] diff --git a/zvt/recorders/joinquant/misc/joinquant_index_money_flow_recorder.py b/zvt/recorders/joinquant/misc/joinquant_index_money_flow_recorder.py deleted file mode 100644 index c54ee776..00000000 --- a/zvt/recorders/joinquant/misc/joinquant_index_money_flow_recorder.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- -import pandas as pd - -from zvt.contract import IntervalLevel -from zvt.contract.api import df_to_db -from zvt.contract.recorder import FixedCycleDataRecorder -from zvt.domain import IndexMoneyFlow, Index, StockMoneyFlow -from zvt.utils import pd_is_not_null, to_time_str - - -class JoinquantIndexMoneyFlowRecorder(FixedCycleDataRecorder): - entity_provider = 'joinquant' - entity_schema = Index - - provider = 'joinquant' - data_schema = IndexMoneyFlow - - def __init__(self, exchanges=['sh', 'sz'], entity_ids=None, codes=None, day_data=False, batch_size=10, - force_update=True, sleeping_time=0, default_size=2000, real_time=False, fix_duplicate_way='ignore', - start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0, level=IntervalLevel.LEVEL_1DAY, - kdata_use_begin_time=False, one_day_trading_minutes=24 * 60) -> None: - # 上证指数,深证成指,创业板指,科创板 - support_codes = ['000001', '399001', '399006', '000688'] - if not codes: - codes = support_codes - else: - codes = list(set(codes) & set(support_codes)) - - super().__init__('index', exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute, level, kdata_use_begin_time, one_day_trading_minutes) - - def record(self, entity, start, end, size, timestamps): - # 上证 - if entity.code == '000001': - all_df = 
StockMoneyFlow.query_data(provider=self.provider, start_timestamp=start, - filters=[StockMoneyFlow.entity_id.like('stock_sh%')]) - # 深证 - elif entity.code == '399001': - all_df = StockMoneyFlow.query_data(provider=self.provider, start_timestamp=start, - filters=[StockMoneyFlow.entity_id.like('stock_sz%')]) - # 创业板 - elif entity.code == '399006': - all_df = StockMoneyFlow.query_data(provider=self.provider, start_timestamp=start, - filters=[StockMoneyFlow.code.like('300%')]) - # 科创板 - elif entity.code == '000688': - all_df = StockMoneyFlow.query_data(provider=self.provider, start_timestamp=start, - filters=[StockMoneyFlow.code.like('688%')]) - - if pd_is_not_null(all_df): - g = all_df.groupby('timestamp') - for timestamp, df in g: - se = pd.Series({'id': "{}_{}".format(entity.id, to_time_str(timestamp)), - 'entity_id': entity.id, - 'timestamp': timestamp, - 'code': entity.code, - 'name': entity.name}) - for col in ['net_main_inflows', 'net_huge_inflows', 'net_big_inflows', 'net_medium_inflows', - 'net_small_inflows']: - se[col] = df[col].sum() - - for col in ['net_main_inflow_rate', 'net_huge_inflow_rate', 'net_big_inflow_rate', - 'net_medium_inflow_rate', - 'net_small_inflow_rate']: - se[col] = df[col].sum() / len(df) - - index_df = se.to_frame().T - - self.logger.info(index_df) - - df_to_db(df=index_df, data_schema=self.data_schema, provider=self.provider, - force_update=self.force_update) - - return None - - -if __name__ == '__main__': - JoinquantIndexMoneyFlowRecorder(start_timestamp='2020-12-01').run() -# the __all__ is generated -__all__ = ['JoinquantIndexMoneyFlowRecorder'] \ No newline at end of file diff --git a/zvt/recorders/joinquant/misc/joinquant_stock_money_flow_recorder.py b/zvt/recorders/joinquant/misc/joinquant_stock_money_flow_recorder.py deleted file mode 100644 index fbcd2609..00000000 --- a/zvt/recorders/joinquant/misc/joinquant_stock_money_flow_recorder.py +++ /dev/null @@ -1,120 +0,0 @@ -# -*- coding: utf-8 -*- -import pandas as pd -from 
jqdatapy import get_token, get_money_flow - -from zvt import zvt_config -from zvt.api import generate_kdata_id -from zvt.contract import IntervalLevel -from zvt.contract.api import df_to_db -from zvt.contract.recorder import FixedCycleDataRecorder -from zvt.domain import StockMoneyFlow, Stock -from zvt.recorders.joinquant.common import to_jq_entity_id -from zvt.recorders.joinquant.misc.joinquant_index_money_flow_recorder import JoinquantIndexMoneyFlowRecorder -from zvt.utils import pd_is_not_null, to_time_str -from zvt.utils.time_utils import TIME_FORMAT_DAY - - -class JoinquantStockMoneyFlowRecorder(FixedCycleDataRecorder): - entity_provider = 'joinquant' - entity_schema = Stock - - provider = 'joinquant' - data_schema = StockMoneyFlow - - def __init__(self, entity_type='stock', exchanges=['sh', 'sz'], entity_ids=None, codes=None, day_data=False, batch_size=10, - force_update=True, sleeping_time=0, default_size=2000, real_time=False, fix_duplicate_way='ignore', - start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0, level=IntervalLevel.LEVEL_1DAY, - kdata_use_begin_time=False, one_day_trading_minutes=24 * 60, compute_index_money_flow=False) -> None: - self.compute_index_money_flow = compute_index_money_flow - super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute, level, kdata_use_begin_time, one_day_trading_minutes) - get_token(zvt_config['jq_username'], zvt_config['jq_password'], force=True) - - def generate_domain_id(self, entity, original_data): - return generate_kdata_id(entity_id=entity.id, timestamp=original_data['timestamp'], level=self.level) - - def on_finish(self): - # 根据 个股资金流 计算 大盘资金流 - if self.compute_index_money_flow: - JoinquantIndexMoneyFlowRecorder().run() - - def record(self, entity, start, end, size, timestamps): - if not self.end_timestamp: - df = 
get_money_flow(code=to_jq_entity_id(entity), - date=to_time_str(start)) - else: - df = get_money_flow(code=to_jq_entity_id(entity), - date=start, end_date=to_time_str(self.end_timestamp)) - - df = df.dropna() - - if pd_is_not_null(df): - df['name'] = entity.name - df.rename(columns={'date': 'timestamp', - 'net_amount_main': 'net_main_inflows', - 'net_pct_main': 'net_main_inflow_rate', - - 'net_amount_xl': 'net_huge_inflows', - 'net_pct_xl': 'net_huge_inflow_rate', - - 'net_amount_l': 'net_big_inflows', - 'net_pct_l': 'net_big_inflow_rate', - - 'net_amount_m': 'net_medium_inflows', - 'net_pct_m': 'net_medium_inflow_rate', - - 'net_amount_s': 'net_small_inflows', - 'net_pct_s': 'net_small_inflow_rate' - }, inplace=True) - - # 转换到标准float - inflows_cols = ['net_main_inflows', 'net_huge_inflows', 'net_big_inflows', 'net_medium_inflows', - 'net_small_inflows'] - for col in inflows_cols: - df[col] = pd.to_numeric(df[col], errors='coerce') - df = df.dropna() - - if not pd_is_not_null(df): - return None - - df[inflows_cols] = df[inflows_cols].apply(lambda x: x * 10000) - - inflow_rate_cols = ['net_main_inflow_rate', 'net_huge_inflow_rate', 'net_big_inflow_rate', - 'net_medium_inflow_rate', 'net_small_inflow_rate'] - for col in inflow_rate_cols: - df[col] = pd.to_numeric(df[col], errors='coerce') - df = df.dropna() - if not pd_is_not_null(df): - return None - - df[inflow_rate_cols] = df[inflow_rate_cols].apply(lambda x: x / 100) - - # 计算总流入 - df['net_inflows'] = df['net_huge_inflows'] + df['net_big_inflows'] + df['net_medium_inflows'] + df[ - 'net_small_inflows'] - # 计算总流入率 - amount = df['net_main_inflows'] / df['net_main_inflow_rate'] - df['net_inflow_rate'] = df['net_inflows'] / amount - - df['entity_id'] = entity.id - df['timestamp'] = pd.to_datetime(df['timestamp']) - df['provider'] = 'joinquant' - df['code'] = entity.code - - def generate_kdata_id(se): - return "{}_{}".format(se['entity_id'], to_time_str(se['timestamp'], fmt=TIME_FORMAT_DAY)) - - df['id'] = 
df[['entity_id', 'timestamp']].apply(generate_kdata_id, axis=1) - - df = df.drop_duplicates(subset='id', keep='last') - - df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) - - return None - - -if __name__ == '__main__': - JoinquantStockMoneyFlowRecorder(codes=['000578']).run() -# the __all__ is generated -__all__ = ['JoinquantStockMoneyFlowRecorder'] \ No newline at end of file diff --git a/zvt/recorders/joinquant/overall/__init__.py b/zvt/recorders/joinquant/overall/__init__.py deleted file mode 100644 index 119d3132..00000000 --- a/zvt/recorders/joinquant/overall/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# the __all__ is generated -__all__ = [] - -# __init__.py structure: -# common code of the package -# export interface in __all__ which contains __all__ of its sub modules - -# import all from submodule cross_market_recorder -from .cross_market_recorder import * -from .cross_market_recorder import __all__ as _cross_market_recorder_all -__all__ += _cross_market_recorder_all - -# import all from submodule margin_trading_recorder -from .margin_trading_recorder import * -from .margin_trading_recorder import __all__ as _margin_trading_recorder_all -__all__ += _margin_trading_recorder_all - -# import all from submodule stock_summary_recorder -from .stock_summary_recorder import * -from .stock_summary_recorder import __all__ as _stock_summary_recorder_all -__all__ += _stock_summary_recorder_all \ No newline at end of file diff --git a/zvt/recorders/joinquant/overall/cross_market_recorder.py b/zvt/recorders/joinquant/overall/cross_market_recorder.py deleted file mode 100644 index cc565ac3..00000000 --- a/zvt/recorders/joinquant/overall/cross_market_recorder.py +++ /dev/null @@ -1,68 +0,0 @@ -from jqdatapy.api import run_query - -from zvt.contract.recorder import TimeSeriesDataRecorder -from zvt.domain import Index, CrossMarketSummary -from zvt.utils.time_utils import to_time_str -from 
zvt.utils.utils import multiple_number - - -class CrossMarketSummaryRecorder(TimeSeriesDataRecorder): - entity_provider = 'exchange' - entity_schema = Index - - provider = 'joinquant' - data_schema = CrossMarketSummary - - def __init__(self, batch_size=10, - force_update=False, sleeping_time=5, default_size=2000, real_time=False, - fix_duplicate_way='add') -> None: - - # 聚宽编码 - # 市场通编码 市场通名称 - # 310001 沪股通 - # 310002 深股通 - # 310003 港股通(沪) - # 310004 港股通(深) - - codes = ['310001', '310002', '310003', '310004'] - super().__init__('index', ['sz'], None, codes, True, batch_size, - force_update, sleeping_time, - default_size, real_time, fix_duplicate_way) - - def init_entities(self): - super().init_entities() - - def record(self, entity, start, end, size, timestamps): - df = run_query(table='finance.STK_ML_QUOTA', conditions=f'link_id#=#{entity.code}&day#>=#{to_time_str(start)}') - print(df) - - json_results = [] - - for item in df.to_dict(orient='records'): - result = { - 'provider': self.provider, - 'timestamp': item['day'], - 'name': entity.name, - 'buy_amount': multiple_number(item['buy_amount'], 100000000), - 'buy_volume': item['buy_volume'], - 'sell_amount': multiple_number(item['sell_amount'], 100000000), - 'sell_volume': item['sell_volume'], - 'quota_daily': multiple_number(item['quota_daily'], 100000000), - 'quota_daily_balance': multiple_number(item['quota_daily_balance'], 100000000) - } - - json_results.append(result) - - if len(json_results) < 100: - self.one_shot = True - - return json_results - - def get_data_map(self): - return None - - -if __name__ == '__main__': - CrossMarketSummaryRecorder(batch_size=30).run() -# the __all__ is generated -__all__ = ['CrossMarketSummaryRecorder'] \ No newline at end of file diff --git a/zvt/recorders/joinquant/overall/margin_trading_recorder.py b/zvt/recorders/joinquant/overall/margin_trading_recorder.py deleted file mode 100644 index 67c2455b..00000000 --- a/zvt/recorders/joinquant/overall/margin_trading_recorder.py +++ 
/dev/null @@ -1,68 +0,0 @@ -from jqdatapy.api import run_query - -from zvt.contract.recorder import TimeSeriesDataRecorder -from zvt.domain import Index, MarginTradingSummary -from zvt.utils.time_utils import to_time_str - -# 聚宽编码 -# XSHG-上海证券交易所 -# XSHE-深圳证券交易所 - -code_map_jq = { - '000001': 'XSHG', - '399106': 'XSHE' -} - - -class MarginTradingSummaryRecorder(TimeSeriesDataRecorder): - entity_provider = 'exchange' - entity_schema = Index - - provider = 'joinquant' - data_schema = MarginTradingSummary - - def __init__(self, batch_size=10, - force_update=False, sleeping_time=5, default_size=2000, real_time=False, - fix_duplicate_way='add') -> None: - # 上海A股,深圳市场 - codes = ['000001', '399106'] - super().__init__('index', ['sh', 'sz'], None, codes, True, batch_size, - force_update, sleeping_time, - default_size, real_time, fix_duplicate_way) - - def record(self, entity, start, end, size, timestamps): - jq_code = code_map_jq.get(entity.code) - - df = run_query(table='finance.STK_MT_TOTAL', - conditions=f'exchange_code#=#{jq_code}&date#>=#{to_time_str(start)}', parse_dates=['date']) - print(df) - - json_results = [] - - for item in df.to_dict(orient='records'): - result = { - 'provider': self.provider, - 'timestamp': item['date'], - 'name': entity.name, - 'margin_value': item['fin_value'], - 'margin_buy': item['fin_buy_value'], - 'short_value': item['sec_value'], - 'short_volume': item['sec_sell_volume'], - 'total_value': item['fin_sec_value'] - } - - json_results.append(result) - - if len(json_results) < 100: - self.one_shot = True - - return json_results - - def get_data_map(self): - return None - - -if __name__ == '__main__': - MarginTradingSummaryRecorder(batch_size=30).run() -# the __all__ is generated -__all__ = ['MarginTradingSummaryRecorder'] \ No newline at end of file diff --git a/zvt/recorders/joinquant/overall/stock_summary_recorder.py b/zvt/recorders/joinquant/overall/stock_summary_recorder.py deleted file mode 100644 index aa1d2ef7..00000000 --- 
a/zvt/recorders/joinquant/overall/stock_summary_recorder.py +++ /dev/null @@ -1,79 +0,0 @@ -from jqdatapy.api import run_query - -from zvt.contract.recorder import TimeSeriesDataRecorder -from zvt.domain import Index -from zvt.domain import StockSummary -from zvt.utils.time_utils import to_time_str -from zvt.utils.utils import multiple_number - -# 聚宽编码 -# 322001 上海市场 -# 322002 上海A股 -# 322003 上海B股 -# 322004 深圳市场 该市场交易所未公布成交量和成交笔数 -# 322005 深市主板 -# 322006 中小企业板 -# 322007 创业板 - -code_map_jq = { - '000001': '322002', - '399106': '322004', - '399001': '322005', - '399005': '322006', - '399006': '322007' -} - - -class StockSummaryRecorder(TimeSeriesDataRecorder): - entity_provider = 'exchange' - entity_schema = Index - - provider = 'joinquant' - data_schema = StockSummary - - def __init__(self, batch_size=10, - force_update=False, sleeping_time=5, default_size=2000, real_time=False, - fix_duplicate_way='add') -> None: - # 上海A股,深圳市场,深圳成指,中小板,创业板 - codes = ['000001', '399106', '399001', '399005', '399006'] - super().__init__('index', ['sh', 'sz'], None, codes, True, batch_size, - force_update, sleeping_time, - default_size, real_time, fix_duplicate_way) - - def record(self, entity, start, end, size, timestamps): - jq_code = code_map_jq.get(entity.code) - - df = run_query(table='finance.STK_EXCHANGE_TRADE_INFO', - conditions=f'exchange_code#=#{jq_code}&date#>=#{to_time_str(start)}', parse_dates=['date']) - print(df) - - json_results = [] - - for item in df.to_dict(orient='records'): - result = { - 'provider': self.provider, - 'timestamp': item['date'], - 'name': entity.name, - 'pe': item['pe_average'], - 'total_value': multiple_number(item['total_market_cap'], 100000000), - 'total_tradable_vaule': multiple_number(item['circulating_market_cap'], 100000000), - 'volume': multiple_number(item['volume'], 10000), - 'turnover': multiple_number(item['money'], 100000000), - 'turnover_rate': item['turnover_ratio'] - } - - json_results.append(result) - - if len(json_results) < 100: - 
self.one_shot = True - - return json_results - - def get_data_map(self): - return None - - -if __name__ == '__main__': - StockSummaryRecorder(batch_size=30).run() -# the __all__ is generated -__all__ = ['StockSummaryRecorder'] \ No newline at end of file diff --git a/zvt/recorders/joinquant/quotes/jq_index_kdata_recorder.py b/zvt/recorders/joinquant/quotes/jq_index_kdata_recorder.py deleted file mode 100644 index bb837bb3..00000000 --- a/zvt/recorders/joinquant/quotes/jq_index_kdata_recorder.py +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- -import argparse - -import pandas as pd -from jqdatapy.api import get_token, get_bars - -from zvt import init_log, zvt_config -from zvt.api.quote import generate_kdata_id, get_kdata_schema, get_kdata -from zvt.contract import IntervalLevel -from zvt.contract.api import df_to_db -from zvt.contract.recorder import FixedCycleDataRecorder -from zvt.domain import Index, IndexKdataCommon -from zvt.recorders.joinquant.common import to_jq_trading_level, to_jq_entity_id -from zvt.utils.pd_utils import pd_is_not_null -from zvt.utils.time_utils import to_time_str, TIME_FORMAT_DAY, TIME_FORMAT_ISO8601 - - -class JqChinaIndexKdataRecorder(FixedCycleDataRecorder): - entity_provider = 'joinquant' - entity_schema = Index - - # 数据来自jq - provider = 'joinquant' - - # 只是为了把recorder注册到data_schema - data_schema = IndexKdataCommon - - def __init__(self, - exchanges=['sh', 'sz'], - entity_ids=None, - codes=None, - day_data=True, - batch_size=10, - force_update=True, - sleeping_time=0, - default_size=2000, - real_time=False, - fix_duplicate_way='ignore', - start_timestamp=None, - end_timestamp=None, - level=IntervalLevel.LEVEL_1WEEK, - kdata_use_begin_time=False, - close_hour=15, - close_minute=0, - one_day_trading_minutes=4 * 60) -> None: - level = IntervalLevel(level) - self.data_schema = get_kdata_schema(entity_type='index', level=level) - self.jq_trading_level = to_jq_trading_level(level) - - super().__init__('index', exchanges, 
entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute, level, kdata_use_begin_time, one_day_trading_minutes) - - get_token(zvt_config['jq_username'], zvt_config['jq_password'], force=True) - - def init_entities(self): - super().init_entities() - # ignore no data index - self.entities = [entity for entity in self.entities if - entity.code not in ['310001', '310002', '310003', '310004']] - - def generate_domain_id(self, entity, original_data): - return generate_kdata_id(entity_id=entity.id, timestamp=original_data['timestamp'], level=self.level) - - def record(self, entity, start, end, size, timestamps): - if not self.end_timestamp: - df = get_bars(to_jq_entity_id(entity), - count=size, - unit=self.jq_trading_level, - # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'] - ) - else: - end_timestamp = to_time_str(self.end_timestamp) - df = get_bars(to_jq_entity_id(entity), - count=size, - unit=self.jq_trading_level, - # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'], - end_date=end_timestamp) - if pd_is_not_null(df): - df['name'] = entity.name - df.rename(columns={'money': 'turnover', 'date': 'timestamp'}, inplace=True) - - df['entity_id'] = entity.id - df['timestamp'] = pd.to_datetime(df['timestamp']) - df['provider'] = 'joinquant' - df['level'] = self.level.value - df['code'] = entity.code - - def generate_kdata_id(se): - if self.level >= IntervalLevel.LEVEL_1DAY: - return "{}_{}".format(se['entity_id'], to_time_str(se['timestamp'], fmt=TIME_FORMAT_DAY)) - else: - return "{}_{}".format(se['entity_id'], to_time_str(se['timestamp'], fmt=TIME_FORMAT_ISO8601)) - - df['id'] = df[['entity_id', 'timestamp']].apply(generate_kdata_id, axis=1) - - df = df.drop_duplicates(subset='id', keep='last') - - df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) - - return None - - 
-if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--level', help='trading level', default='1d', choices=[item.value for item in IntervalLevel]) - parser.add_argument('--codes', help='codes', default=['000001'], nargs='+') - - args = parser.parse_args() - - level = IntervalLevel(args.level) - codes = args.codes - - init_log('jq_china_stock_{}_kdata.log'.format(args.level)) - JqChinaIndexKdataRecorder(level=level, sleeping_time=0, codes=codes, real_time=False).run() - - print(get_kdata(entity_id='index_sh_000001', limit=10)) -# the __all__ is generated -__all__ = ['JqChinaIndexKdataRecorder'] \ No newline at end of file diff --git a/zvt/recorders/joinquant/quotes/jq_stock_kdata_recorder.py b/zvt/recorders/joinquant/quotes/jq_stock_kdata_recorder.py deleted file mode 100644 index a79e2aee..00000000 --- a/zvt/recorders/joinquant/quotes/jq_stock_kdata_recorder.py +++ /dev/null @@ -1,152 +0,0 @@ -# -*- coding: utf-8 -*- -import argparse - -import pandas as pd -from jqdatapy.api import get_token, get_bars - -from zvt import init_log, zvt_config -from zvt.api.quote import generate_kdata_id, get_kdata_schema, get_kdata -from zvt.contract import IntervalLevel, AdjustType -from zvt.contract.api import df_to_db -from zvt.contract.recorder import FixedCycleDataRecorder -from zvt.domain import Stock, StockKdataCommon, Stock1dHfqKdata, Stock1wkHfqKdata -from zvt.recorders.joinquant.common import to_jq_trading_level, to_jq_entity_id -from zvt.utils.pd_utils import pd_is_not_null -from zvt.utils.time_utils import to_time_str, now_pd_timestamp, TIME_FORMAT_DAY, TIME_FORMAT_ISO8601 - - -class JqChinaStockKdataRecorder(FixedCycleDataRecorder): - entity_provider = 'joinquant' - entity_schema = Stock - - # 数据来自jq - provider = 'joinquant' - - # 只是为了把recorder注册到data_schema - data_schema = StockKdataCommon - - def __init__(self, - exchanges=['sh', 'sz'], - entity_ids=None, - codes=None, - day_data=False, - batch_size=10, - force_update=True, - 
sleeping_time=0, - default_size=2000, - real_time=False, - fix_duplicate_way='ignore', - start_timestamp=None, - end_timestamp=None, - level=IntervalLevel.LEVEL_1WEEK, - kdata_use_begin_time=False, - close_hour=15, - close_minute=0, - one_day_trading_minutes=4 * 60, - adjust_type=AdjustType.qfq) -> None: - level = IntervalLevel(level) - adjust_type = AdjustType(adjust_type) - self.data_schema = get_kdata_schema(entity_type='stock', level=level, adjust_type=adjust_type) - self.jq_trading_level = to_jq_trading_level(level) - - super().__init__('stock', exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute, level, kdata_use_begin_time, one_day_trading_minutes) - self.adjust_type = adjust_type - - get_token(zvt_config['jq_username'], zvt_config['jq_password'], force=True) - - def init_entities(self): - super().init_entities() - # 过滤掉退市的 - self.entities = [entity for entity in self.entities if - (entity.end_date is None) or (entity.end_date > now_pd_timestamp())] - - - def generate_domain_id(self, entity, original_data): - return generate_kdata_id(entity_id=entity.id, timestamp=original_data['timestamp'], level=self.level) - - def recompute_qfq(self, entity, qfq_factor, last_timestamp): - # 重新计算前复权数据 - if qfq_factor != 0: - kdatas = get_kdata(provider=self.provider, entity_id=entity.id, level=self.level.value, - order=self.data_schema.timestamp.asc(), - return_type='domain', - session=self.session, - filters=[self.data_schema.timestamp < last_timestamp]) - if kdatas: - self.logger.info('recomputing {} qfq kdata,factor is:{}'.format(entity.code, qfq_factor)) - for kdata in kdatas: - kdata.open = round(kdata.open * qfq_factor, 2) - kdata.close = round(kdata.close * qfq_factor, 2) - kdata.high = round(kdata.high * qfq_factor, 2) - kdata.low = round(kdata.low * qfq_factor, 2) - self.session.add_all(kdatas) - self.session.commit() - - def 
record(self, entity, start, end, size, timestamps): - if self.adjust_type == AdjustType.hfq: - fq_ref_date = '2000-01-01' - else: - fq_ref_date = to_time_str(now_pd_timestamp()) - - if not self.end_timestamp: - df = get_bars(to_jq_entity_id(entity), - count=size, - unit=self.jq_trading_level, - # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'], - fq_ref_date=fq_ref_date) - else: - end_timestamp = to_time_str(self.end_timestamp) - df = get_bars(to_jq_entity_id(entity), - count=size, - unit=self.jq_trading_level, - # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'], - end_date=end_timestamp, - fq_ref_date=fq_ref_date) - if pd_is_not_null(df): - df['name'] = entity.name - df.rename(columns={'money': 'turnover', 'date': 'timestamp'}, inplace=True) - - df['entity_id'] = entity.id - df['timestamp'] = pd.to_datetime(df['timestamp']) - df['provider'] = 'joinquant' - df['level'] = self.level.value - df['code'] = entity.code - - # 判断是否需要重新计算之前保存的前复权数据 - if self.adjust_type == AdjustType.qfq: - check_df = df.head(1) - check_date = check_df['timestamp'][0] - current_df = get_kdata(entity_id=entity.id, provider=self.provider, start_timestamp=check_date, - end_timestamp=check_date, limit=1, level=self.level, - adjust_type=self.adjust_type) - if pd_is_not_null(current_df): - old = current_df.iloc[0, :]['close'] - new = check_df['close'][0] - # 相同时间的close不同,表明前复权需要重新计算 - if round(old, 2) != round(new, 2): - qfq_factor = new / old - last_timestamp = pd.Timestamp(check_date) - self.recompute_qfq(entity, qfq_factor=qfq_factor, last_timestamp=last_timestamp) - - def generate_kdata_id(se): - if self.level >= IntervalLevel.LEVEL_1DAY: - return "{}_{}".format(se['entity_id'], to_time_str(se['timestamp'], fmt=TIME_FORMAT_DAY)) - else: - return "{}_{}".format(se['entity_id'], to_time_str(se['timestamp'], fmt=TIME_FORMAT_ISO8601)) - - df['id'] = df[['entity_id', 'timestamp']].apply(generate_kdata_id, axis=1) - - df = df.drop_duplicates(subset='id', 
keep='last') - - df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update) - - return None - - -if __name__ == '__main__': - Stock1wkHfqKdata.record_data(codes=['300999']) - -# the __all__ is generated -__all__ = ['JqChinaStockKdataRecorder'] diff --git a/zvt/recorders/sina/__init__.py b/zvt/recorders/sina/__init__.py deleted file mode 100644 index 2d17b70e..00000000 --- a/zvt/recorders/sina/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .china_etf_day_kdata_recorder import * -from .china_index_day_kdata_recorder import * -from .meta import * -from .money_flow import * diff --git a/zvt/recorders/sina/china_etf_day_kdata_recorder.py b/zvt/recorders/sina/china_etf_day_kdata_recorder.py deleted file mode 100644 index 57432541..00000000 --- a/zvt/recorders/sina/china_etf_day_kdata_recorder.py +++ /dev/null @@ -1,128 +0,0 @@ -# -*- coding: utf-8 -*- - -import demjson -import pandas as pd -import requests - -from zvt.contract import IntervalLevel -from zvt.contract.recorder import FixedCycleDataRecorder -from zvt.utils.time_utils import to_time_str -from zvt import init_log -from zvt.api.quote import generate_kdata_id -from zvt.api import get_kdata -from zvt.domain import Etf, Index, Etf1dKdata -from zvt.recorders.consts import EASTMONEY_ETF_NET_VALUE_HEADER - - -class ChinaETFDayKdataRecorder(FixedCycleDataRecorder): - entity_provider = 'exchange' - entity_schema = Etf - - provider = 'sina' - data_schema = Etf1dKdata - url = 'http://money.finance.sina.com.cn/quotes_service/api/json_v2.php/CN_MarketData.getKLineData?' 
\ - 'symbol={}{}&scale=240&&datalen={}&ma=no' - - def __init__(self, entity_type='etf', exchanges=['sh', 'sz'], entity_ids=None, codes=None, day_data=False, batch_size=10, - force_update=False, sleeping_time=10, default_size=2000, real_time=True, fix_duplicate_way='add', - start_timestamp=None, end_timestamp=None, - level=IntervalLevel.LEVEL_1DAY, kdata_use_begin_time=False, close_hour=0, close_minute=0, - one_day_trading_minutes=24 * 60) -> None: - super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute, level, kdata_use_begin_time, one_day_trading_minutes) - - def get_data_map(self): - return {} - - def generate_domain_id(self, entity, original_data): - return generate_kdata_id(entity_id=entity.id, timestamp=original_data['timestamp'], level=self.level) - - def on_finish_entity(self, entity): - kdatas = get_kdata(entity_id=entity.id, level=IntervalLevel.LEVEL_1DAY.value, - order=Etf1dKdata.timestamp.asc(), - return_type='domain', session=self.session, - filters=[Etf1dKdata.cumulative_net_value.is_(None)]) - - if kdatas and len(kdatas) > 0: - start = kdatas[0].timestamp - end = kdatas[-1].timestamp - - # 从东方财富获取基金累计净值 - df = self.fetch_cumulative_net_value(entity, start, end) - - if df is not None and not df.empty: - for kdata in kdatas: - if kdata.timestamp in df.index: - kdata.cumulative_net_value = df.loc[kdata.timestamp, 'LJJZ'] - kdata.change_pct = df.loc[kdata.timestamp, 'JZZZL'] - self.session.commit() - self.logger.info(f'{entity.code} - {entity.name}累计净值更新完成...') - - def fetch_cumulative_net_value(self, security_item, start, end) -> pd.DataFrame: - query_url = 'http://api.fund.eastmoney.com/f10/lsjz?' 
\ - 'fundCode={}&pageIndex={}&pageSize=200&startDate={}&endDate={}' - - page = 1 - df = pd.DataFrame() - while True: - url = query_url.format(security_item.code, page, to_time_str(start), to_time_str(end)) - - response = requests.get(url, headers=EASTMONEY_ETF_NET_VALUE_HEADER) - response_json = demjson.decode(response.text) - response_df = pd.DataFrame(response_json['Data']['LSJZList']) - - # 最后一页 - if response_df.empty: - break - - response_df['FSRQ'] = pd.to_datetime(response_df['FSRQ']) - response_df['JZZZL'] = pd.to_numeric(response_df['JZZZL'], errors='coerce') - response_df['LJJZ'] = pd.to_numeric(response_df['LJJZ'], errors='coerce') - response_df = response_df.fillna(0) - response_df.set_index('FSRQ', inplace=True, drop=True) - - df = pd.concat([df, response_df]) - page += 1 - - self.sleep() - - return df - - def record(self, entity, start, end, size, timestamps): - # 此 url 不支持分页,如果超过我们想取的条数,则只能取最大条数 - if start is None or size > self.default_size: - size = 8000 - - param = { - 'security_item': entity, - 'level': self.level.value, - 'size': size - } - - security_item = param['security_item'] - size = param['size'] - - url = ChinaETFDayKdataRecorder.url.format(security_item.exchange, security_item.code, size) - - response = requests.get(url) - response_json = demjson.decode(response.text) - - if response_json is None or len(response_json) == 0: - return [] - - df = pd.DataFrame(response_json) - df.rename(columns={'day': 'timestamp'}, inplace=True) - df['timestamp'] = pd.to_datetime(df['timestamp']) - df['name'] = security_item.name - df['provider'] = 'sina' - df['level'] = param['level'] - - return df.to_dict(orient='records') - - -__all__ = ['ChinaETFDayKdataRecorder'] - -if __name__ == '__main__': - init_log('sina_china_etf_day_kdata.log') - ChinaETFDayKdataRecorder(level=IntervalLevel.LEVEL_1DAY).run() diff --git a/zvt/recorders/sina/china_index_day_kdata_recorder.py b/zvt/recorders/sina/china_index_day_kdata_recorder.py deleted file mode 100644 index 
527ebc5f..00000000 --- a/zvt/recorders/sina/china_index_day_kdata_recorder.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- - -import time - -import pandas as pd -import requests - -from zvt.api.quote import generate_kdata_id -from zvt.contract import IntervalLevel -from zvt.contract.recorder import FixedCycleDataRecorder -from zvt.domain import Index, Index1dKdata -from zvt.utils.time_utils import get_year_quarters, is_same_date - - -class ChinaIndexDayKdataRecorder(FixedCycleDataRecorder): - entity_provider = 'exchange' - entity_schema = Index - - provider = 'sina' - data_schema = Index1dKdata - url = 'http://vip.stock.finance.sina.com.cn/corp/go.php/vMS_MarketHistory/stockid/{}/type/S.phtml?year={}&jidu={}' - - def __init__(self, entity_type='index', exchanges=['cn'], entity_ids=None, codes=None, day_data=False, - batch_size=10, - force_update=False, sleeping_time=10, default_size=2000, real_time=False, fix_duplicate_way='add', - start_timestamp=None, end_timestamp=None, - level=IntervalLevel.LEVEL_1DAY, kdata_use_begin_time=False, close_hour=0, close_minute=0, - one_day_trading_minutes=24 * 60) -> None: - super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute, level, kdata_use_begin_time, one_day_trading_minutes) - - def get_data_map(self): - return {} - - def generate_domain_id(self, entity, original_data): - return generate_kdata_id(entity.id, timestamp=original_data['timestamp'], level=self.level) - - def record(self, entity, start, end, size, timestamps): - the_quarters = get_year_quarters(start) - if not is_same_date(entity.timestamp, start) and len(the_quarters) > 1: - the_quarters = the_quarters[1:] - - param = { - 'security_item': entity, - 'quarters': the_quarters, - 'level': self.level.value - } - - security_item = param['security_item'] - quarters = param['quarters'] - level = 
param['level'] - - result_df = pd.DataFrame() - for year, quarter in quarters: - query_url = self.url.format(security_item.code, year, quarter) - response = requests.get(query_url) - response.encoding = 'gbk' - - try: - dfs = pd.read_html(response.text) - except ValueError as error: - self.logger.error(f'skip ({year}-{quarter:02d}){security_item.code}{security_item.name}({error})') - time.sleep(10.0) - continue - - if len(dfs) < 5: - time.sleep(10.0) - continue - - df = dfs[4].copy() - df = df.iloc[1:] - df.columns = ['timestamp', 'open', 'high', 'close', 'low', 'volume', 'turnover'] - df['name'] = security_item.name - df['level'] = level - df['timestamp'] = pd.to_datetime(df['timestamp']) - df['provider'] = 'sina' - - result_df = pd.concat([result_df, df]) - - self.logger.info(f'({security_item.code}{security_item.name})({year}-{quarter:02d})') - time.sleep(10.0) - - result_df = result_df.sort_values(by='timestamp') - - return result_df.to_dict(orient='records') - - -__all__ = ['ChinaIndexDayKdataRecorder'] - -if __name__ == '__main__': - ChinaIndexDayKdataRecorder().run() diff --git a/zvt/recorders/sina/meta/__init__.py b/zvt/recorders/sina/meta/__init__.py deleted file mode 100644 index 63e373ca..00000000 --- a/zvt/recorders/sina/meta/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# -*- coding: utf-8 -*- -from .sina_china_stock_category_recorder import * diff --git a/zvt/recorders/sina/meta/sina_china_stock_category_recorder.py b/zvt/recorders/sina/meta/sina_china_stock_category_recorder.py deleted file mode 100644 index fee668c0..00000000 --- a/zvt/recorders/sina/meta/sina_china_stock_category_recorder.py +++ /dev/null @@ -1,118 +0,0 @@ -# -*- coding: utf-8 -*- -import json - -import demjson -import pandas as pd -import requests - -from zvt.contract.api import df_to_db -from zvt.contract.recorder import Recorder, TimeSeriesDataRecorder -from zvt.utils.time_utils import now_pd_timestamp -from zvt.api.quote import china_stock_code_to_id -from zvt.domain import 
BlockStock, BlockCategory, Block - - -class SinaChinaBlockRecorder(Recorder): - provider = 'sina' - data_schema = Block - - # 用于抓取行业/概念/地域列表 - category_map_url = { - BlockCategory.industry: 'http://vip.stock.finance.sina.com.cn/q/view/newSinaHy.php', - BlockCategory.concept: 'http://money.finance.sina.com.cn/q/view/newFLJK.php?param=class' - # StockCategory.area: 'http://money.finance.sina.com.cn/q/view/newFLJK.php?param=area', - } - - def run(self): - # get stock blocks from sina - for category, url in self.category_map_url.items(): - resp = requests.get(url) - resp.encoding = 'GBK' - - tmp_str = resp.text - json_str = tmp_str[tmp_str.index('{'):tmp_str.index('}') + 1] - tmp_json = json.loads(json_str) - - the_list = [] - - for code in tmp_json: - name = tmp_json[code].split(',')[1] - entity_id = f'block_cn_{code}' - the_list.append({ - 'id': entity_id, - 'entity_id': entity_id, - 'entity_type': 'block', - 'exchange': 'cn', - 'code': code, - 'name': name, - 'category': category.value - }) - if the_list: - df = pd.DataFrame.from_records(the_list) - df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, - force_update=True) - - self.logger.info(f"finish record sina blocks:{category.value}") - - -class SinaChinaBlockStockRecorder(TimeSeriesDataRecorder): - entity_provider = 'sina' - entity_schema = Block - - provider = 'sina' - data_schema = BlockStock - - # 用于抓取行业包含的股票 - category_stocks_url = 'http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/Market_Center.getHQNodeData?page={}&num=5000&sort=symbol&asc=1&node={}&symbol=&_s_r_a=page' - - def __init__(self, entity_type='block', exchanges=None, entity_ids=None, codes=None, day_data=False, batch_size=10, - force_update=True, sleeping_time=5, default_size=2000, real_time=False, fix_duplicate_way='add', - start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0) -> None: - super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, 
sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute) - - def record(self, entity, start, end, size, timestamps): - for page in range(1, 5): - resp = requests.get(self.category_stocks_url.format(page, entity.code)) - try: - if resp.text == 'null' or resp.text is None: - break - category_jsons = demjson.decode(resp.text) - the_list = [] - for category in category_jsons: - stock_code = category['code'] - stock_id = china_stock_code_to_id(stock_code) - block_id = entity.id - the_list.append({ - 'id': '{}_{}'.format(block_id, stock_id), - 'entity_id': block_id, - 'entity_type': 'block', - 'exchange': entity.exchange, - 'code': entity.code, - 'name': entity.name, - 'timestamp': now_pd_timestamp(), - 'stock_id': stock_id, - 'stock_code': stock_code, - 'stock_name': category['name'], - - }) - if the_list: - df = pd.DataFrame.from_records(the_list) - df_to_db(data_schema=self.data_schema, df=df, provider=self.provider, - force_update=True) - - self.logger.info('finish recording BlockStock:{},{}'.format(entity.category, entity.name)) - - except Exception as e: - self.logger.error("error:,resp.text:", e, resp.text) - self.sleep() - - -__all__ = ['SinaChinaBlockRecorder', 'SinaChinaBlockStockRecorder'] - -if __name__ == '__main__': - # init_log('sina_china_stock_category.log') - - recorder = SinaChinaBlockStockRecorder(codes=['new_cbzz']) - recorder.run() diff --git a/zvt/recorders/sina/money_flow/__init__.py b/zvt/recorders/sina/money_flow/__init__.py deleted file mode 100644 index c08fd679..00000000 --- a/zvt/recorders/sina/money_flow/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -from .sina_block_money_flow_recorder import * -from .sina_stock_money_flow_recorder import * diff --git a/zvt/recorders/sina/money_flow/sina_block_money_flow_recorder.py b/zvt/recorders/sina/money_flow/sina_block_money_flow_recorder.py deleted file mode 100644 index 3a09c0f7..00000000 --- 
a/zvt/recorders/sina/money_flow/sina_block_money_flow_recorder.py +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- -import time - -import requests - -from zvt.contract import IntervalLevel -from zvt.contract.recorder import FixedCycleDataRecorder -from zvt.utils.time_utils import to_pd_timestamp -from zvt.utils.utils import to_float -from zvt.domain import BlockMoneyFlow, BlockCategory, Block - - -# 实时资金流 -# 'http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/MoneyFlow.ssl_bkzj_bk?page=1&num=20&sort=netamount&asc=0&fenlei=1' -# 'http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/MoneyFlow.ssl_bkzj_bk?page=1&num=20&sort=netamount&asc=0&fenlei=0' - - -class SinaBlockMoneyFlowRecorder(FixedCycleDataRecorder): - # entity的信息从哪里来 - entity_provider = 'sina' - # entity的schema - entity_schema = Block - - # 记录的信息从哪里来 - provider = 'sina' - # 记录的schema - data_schema = BlockMoneyFlow - - url = 'http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/MoneyFlow.ssl_bkzj_zjlrqs?page=1&num={}&sort=opendate&asc=0&bankuai={}%2F{}' - - def __init__(self, exchanges=None, entity_ids=None, codes=None, day_data=False, batch_size=10, - force_update=True, sleeping_time=10, default_size=2000, real_time=False, fix_duplicate_way='ignore', - start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0, level=IntervalLevel.LEVEL_1DAY, - kdata_use_begin_time=False, one_day_trading_minutes=24 * 60) -> None: - super().__init__('block', exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute, level, kdata_use_begin_time, one_day_trading_minutes) - - def generate_url(self, category, code, number): - if category == BlockCategory.industry.value: - block = 0 - elif category == BlockCategory.concept.value: - block = 1 - - return self.url.format(number, block, code) - - def get_data_map(self): - return {} - - def 
record(self, entity, start, end, size, timestamps): - url = self.generate_url(category=entity.category, code=entity.code, number=size) - - resp = requests.get(url) - - opendate = "opendate" - avg_price = "avg_price" - avg_changeratio = 'avg_changeratio' - turnover = 'turnover' - netamount = 'netamount' - ratioamount = 'ratioamount' - r0_net = 'r0_net' - r0_ratio = 'r0_ratio' - r0x_ratio = 'r0x_ratio' - cnt_r0x_ratio = 'cnt_r0x_ratio' - - json_list = [] - try: - json_list = eval(resp.text) - except Exception as e: - resp.encoding = 'GBK' - self.logger.error(resp.text) - time.sleep(60 * 5) - - result_list = [] - for item in json_list: - result_list.append({ - 'name': entity.name, - 'timestamp': to_pd_timestamp(item['opendate']), - 'close': to_float(item['avg_price']), - 'change_pct': to_float(item['avg_changeratio']), - 'turnover_rate': to_float(item['turnover']) / 10000, - 'net_inflows': to_float(item['netamount']), - 'net_inflow_rate': to_float(item['ratioamount']), - 'net_main_inflows': to_float(item['r0_net']), - 'net_main_inflow_rate': to_float(item['r0_ratio']) - }) - - return result_list - - -__all__ = ['SinaBlockMoneyFlowRecorder'] - -if __name__ == '__main__': - SinaBlockMoneyFlowRecorder(codes=['new_fjzz']).run() - # SinaIndexMoneyFlowRecorder().run() diff --git a/zvt/recorders/sina/money_flow/sina_stock_money_flow_recorder.py b/zvt/recorders/sina/money_flow/sina_stock_money_flow_recorder.py deleted file mode 100644 index 9e04413f..00000000 --- a/zvt/recorders/sina/money_flow/sina_stock_money_flow_recorder.py +++ /dev/null @@ -1,141 +0,0 @@ -# -*- coding: utf-8 -*- -import time - -import requests - -from zvt.contract import IntervalLevel -from zvt.contract.recorder import FixedCycleDataRecorder -from zvt.utils.time_utils import to_pd_timestamp, is_same_date, now_pd_timestamp -from zvt.utils.utils import to_float -from zvt.domain import StockMoneyFlow, Stock, StockTradeDay - - -class SinaStockMoneyFlowRecorder(FixedCycleDataRecorder): - entity_provider = 
'joinquant' - entity_schema = Stock - - provider = 'sina' - data_schema = StockMoneyFlow - - url = 'http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/MoneyFlow.ssl_qsfx_lscjfb?page=1&num={}&sort=opendate&asc=0&daima={}' - - def __init__(self, exchanges=None, entity_ids=None, codes=None, day_data=False, batch_size=10, - force_update=True, sleeping_time=10, default_size=2000, real_time=False, fix_duplicate_way='ignore', - start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0, level=IntervalLevel.LEVEL_1DAY, - kdata_use_begin_time=False, one_day_trading_minutes=24 * 60) -> None: - super().__init__('stock', exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time, - default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, - close_minute, level, kdata_use_begin_time, one_day_trading_minutes) - - def init_entities(self): - super().init_entities() - # 过滤掉退市的 - self.entities = [entity for entity in self.entities if - (entity.end_date is None) or (entity.end_date > now_pd_timestamp())] - - # TODO:more general for the case using StockTradeDay - def evaluate_start_end_size_timestamps(self, entity): - start, end, size, timestamps = super().evaluate_start_end_size_timestamps(entity) - if start: - trade_day = StockTradeDay.query_data(limit=1, order=StockTradeDay.timestamp.desc(), return_type='domain') - if trade_day: - if is_same_date(trade_day[0].timestamp, start): - size = 0 - return start, end, size, timestamps - - def generate_url(self, code, number): - return self.url.format(number, code) - - def get_data_map(self): - return {} - - def record(self, entity, start, end, size, timestamps): - param = { - 'url': self.generate_url(code='{}{}'.format(entity.exchange, entity.code), number=size), - 'security_item': entity - } - - resp = requests.get(param['url']) - # {opendate:"2019-04-29",trade:"10.8700",changeratio:"-0.0431338",turnover:"74.924",netamount:"-2903349.8500", - # 
ratioamount:"-0.155177",r0:"0.0000",r1:"2064153.0000",r2:"6485031.0000",r3:"10622169.2100",r0_net:"0.0000", - # r1_net:"2064153.0000",r2_net:"-1463770.0000",r3_net:"-3503732.8500"} - opendate = "opendate" - trade = "trade" - changeratio = 'changeratio' - turnover = 'turnover' - netamount = 'netamount' - ratioamount = 'ratioamount' - r0 = 'r0' - r1 = 'r1' - r2 = 'r2' - r3 = 'r3' - r0_net = 'r0_net' - r1_net = 'r1_net' - r2_net = 'r2_net' - r3_net = 'r3_net' - - json_list = [] - - try: - json_list = eval(resp.text) - except Exception as e: - resp.encoding = 'GBK' - self.logger.error(resp.text) - time.sleep(60 * 5) - - result_list = [] - for item in json_list: - amount = to_float(item['r0']) + to_float(item['r1']) + to_float(item['r2']) + to_float(item['r3']) - - result = { - 'timestamp': to_pd_timestamp(item['opendate']), - 'name': entity.name, - 'close': to_float(item['trade']), - 'change_pct': to_float(item['changeratio']), - 'turnover_rate': to_float(item['turnover']) / 10000, - 'net_inflows': to_float(item['netamount']), - 'net_inflow_rate': to_float(item['ratioamount']), - # # 主力=超大单+大单 - # net_main_inflows = Column(Float) - # net_main_inflow_rate = Column(Float) - # # 超大单 - # net_huge_inflows = Column(Float) - # net_huge_inflow_rate = Column(Float) - # # 大单 - # net_big_inflows = Column(Float) - # net_big_inflow_rate = Column(Float) - # - # # 中单 - # net_medium_inflows = Column(Float) - # net_medium_inflow_rate = Column(Float) - # # 小单 - # net_small_inflows = Column(Float) - # net_small_inflow_rate = Column(Float) - 'net_main_inflows': to_float(item['r0_net']) + to_float(item['r1_net']), - - 'net_huge_inflows': to_float(item['r0_net']), - - 'net_big_inflows': to_float(item['r1_net']), - - 'net_medium_inflows': to_float(item['r2_net']), - - 'net_small_inflows': to_float(item['r3_net']), - } - - if amount != 0: - result['net_main_inflow_rate'] = (to_float(item['r0_net']) + to_float(item['r1_net'])) / amount - result['net_huge_inflow_rate'] = 
to_float(item['r0_net']) / amount - result['net_big_inflow_rate'] = to_float(item['r1_net']) / amount - result['net_medium_inflow_rate'] = to_float(item['r2_net']) / amount - result['net_small_inflow_rate'] = to_float(item['r3_net']) / amount - - result_list.append(result) - - return result_list - - -__all__ = ['SinaStockMoneyFlowRecorder'] - -if __name__ == '__main__': - SinaStockMoneyFlowRecorder(codes=['000406']).run() - # SinaStockMoneyFlowRecorder().run() diff --git a/zvt/samples/__init__.py b/zvt/samples/__init__.py deleted file mode 100644 index 03202f6d..00000000 --- a/zvt/samples/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# -*- coding: utf-8 -*- -from .stock_traders import * \ No newline at end of file diff --git a/zvt/samples/stock_traders.py b/zvt/samples/stock_traders.py deleted file mode 100644 index 982705c4..00000000 --- a/zvt/samples/stock_traders.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -from zvt.contract import IntervalLevel -from zvt.factors.target_selector import TargetSelector -from zvt.factors.ma.ma_factor import CrossMaFactor -from zvt.factors import BullFactor -from zvt.trader.trader import StockTrader - - -class MyMaTrader(StockTrader): - def init_selectors(self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, - adjust_type=None): - myselector = TargetSelector(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - provider='joinquant') - - myselector.add_filter_factor( - CrossMaFactor(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - windows=[5, 10], need_persist=False, adjust_type=adjust_type)) - - self.selectors.append(myselector) - - -class MyBullTrader(StockTrader): - def init_selectors(self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, - adjust_type=None): - 
myselector = TargetSelector(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - provider='joinquant') - - myselector.add_filter_factor( - BullFactor(entity_ids=entity_ids, entity_schema=entity_schema, exchanges=exchanges, - codes=codes, start_timestamp=start_timestamp, end_timestamp=end_timestamp, - adjust_type=adjust_type)) - - self.selectors.append(myselector) - - -if __name__ == '__main__': - # single stock with cross ma factor - MyMaTrader(codes=['000338'], level=IntervalLevel.LEVEL_1DAY, start_timestamp='2018-01-01', - end_timestamp='2019-06-30', trader_name='000338_ma_trader').run() - - # single stock with bull factor - # MyBullTrader(codes=['000338'], level=IntervalLevel.LEVEL_1DAY, start_timestamp='2018-01-01', - # end_timestamp='2019-06-30', trader_name='000338_bull_trader').run() - - # multiple stocks with cross ma factor - # MyMaTrader(codes=SAMPLE_STOCK_CODES, level=IntervalLevel.LEVEL_1DAY, start_timestamp='2018-01-01', - # end_timestamp='2019-06-30', trader_name='sample_stocks_ma_trader').run() - - # multiple stocks with bull factor - # MyBullTrader(codes=SAMPLE_STOCK_CODES, level=IntervalLevel.LEVEL_1DAY, start_timestamp='2018-01-01', - # end_timestamp='2019-06-30', trader_name='sample_stocks_bull_trader').run() diff --git a/zvt/trader/__init__.py b/zvt/trader/__init__.py deleted file mode 100644 index 81d86e68..00000000 --- a/zvt/trader/__init__.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- -import enum -from typing import Union, List - -import pandas as pd - -from zvt.contract import IntervalLevel -from zvt.utils.decorator import to_string - - -class TradingSignalType(enum.Enum): - open_long = 'open_long' - open_short = 'open_short' - keep_long = 'keep_long' - keep_short = 'keep_short' - close_long = 'close_long' - close_short = 'close_short' - - -@to_string -class TradingSignal: - def __init__(self, - entity_id: str, - due_timestamp: 
Union[str, pd.Timestamp], - happen_timestamp: Union[str, pd.Timestamp], - trading_level: IntervalLevel, - trading_signal_type: TradingSignalType, - position_pct: float = 0, - order_money: float = 0): - self.entity_id = entity_id - self.due_timestamp = due_timestamp - self.happen_timestamp = happen_timestamp - self.trading_level = trading_level - self.trading_signal_type = trading_signal_type - - # use position_pct or order_money - self.position_pct = position_pct - - # when close the position,just use position_pct - self.order_money = order_money - - -class TradingListener(object): - def on_trading_signals(self, trading_signals: List[TradingSignal]): - raise NotImplementedError - - def on_trading_signal(self, trading_signal: TradingSignal): - raise NotImplementedError - - def on_trading_open(self, timestamp): - raise NotImplementedError - - def on_trading_close(self, timestamp): - raise NotImplementedError - - def on_trading_finish(self, timestamp): - raise NotImplementedError - - def on_trading_error(self, timestamp, error): - raise NotImplementedError -# the __all__ is generated -__all__ = ['TradingSignalType', 'TradingListener'] - -# __init__.py structure: -# common code of the package -# export interface in __all__ which contains __all__ of its sub modules - -# import all from submodule trader -from .trader import * -from .trader import __all__ as _trader_all -__all__ += _trader_all - -# import all from submodule errors -from .errors import * -from .errors import __all__ as _errors_all -__all__ += _errors_all - -# import all from submodule account -from .account import * -from .account import __all__ as _account_all -__all__ += _account_all \ No newline at end of file diff --git a/zvt/trader/account.py b/zvt/trader/account.py deleted file mode 100644 index d9d204f0..00000000 --- a/zvt/trader/account.py +++ /dev/null @@ -1,640 +0,0 @@ -# -*- coding: utf-8 -*- - -import logging -import math -from typing import List - -from zvt.api import get_kdata -from 
zvt.api.quote import decode_entity_id, get_kdata_schema -from zvt.api.trader_info_api import get_trader_info, clear_trader -from zvt.contract import IntervalLevel, EntityMixin, AdjustType -from zvt.contract.api import get_db_session -from zvt.domain.trader_info import AccountStats, Position, Order, TraderInfo -from zvt.trader import TradingSignalType, TradingListener, TradingSignal -from zvt.trader.errors import NotEnoughMoneyError, InvalidOrderError, NotEnoughPositionError, InvalidOrderParamError, \ - WrongKdataError -from zvt.utils.pd_utils import pd_is_not_null -from zvt.utils.time_utils import to_pd_timestamp, to_time_str, TIME_FORMAT_ISO8601, is_same_date -from zvt.utils.utils import fill_domain_from_dict - -ORDER_TYPE_LONG = 'order_long' -ORDER_TYPE_SHORT = 'order_short' -ORDER_TYPE_CLOSE_LONG = 'order_close_long' -ORDER_TYPE_CLOSE_SHORT = 'order_close_short' - -from marshmallow_sqlalchemy import SQLAlchemyAutoSchema - - -# FIXME:better way for schema<->domain,now just dump to schema and use dict['field'] for operation -class AccountDayStatsSchema(SQLAlchemyAutoSchema): - class Meta: - model = AccountStats - include_relationships = True - - -class PositionSchema(SQLAlchemyAutoSchema): - class Meta: - model = Position - include_relationships = True - - -account_stats_schema = AccountDayStatsSchema() -position_schema = PositionSchema() - - -class AccountService(TradingListener): - logger = logging.getLogger(__name__) - - def get_current_position(self, entity_id): - """ - overwrite it to provide your real position - - :param entity_id: - """ - pass - - def get_current_account(self): - pass - - def order(self, entity_id, current_price, current_timestamp, order_amount=0, order_pct=1.0, order_price=0, - order_type=ORDER_TYPE_LONG, order_money=0): - pass - - # 开多,对于某些品种只能开多,比如中国股票 - def buy(self, entity_id, current_price, current_timestamp, order_amount=0, order_pct=1.0, order_price=0, - order_money=0): - self.order(entity_id, current_price, current_timestamp, 
order_amount, order_pct, order_price, - order_type=ORDER_TYPE_LONG, order_money=order_money) - - # 开空 - def sell(self, entity_id, current_price, current_timestamp, order_amount=0, order_pct=1.0, order_price=0, - order_money=0): - self.order(entity_id, current_price, current_timestamp, order_amount, order_pct, order_price, - order_type=ORDER_TYPE_SHORT, order_money=order_money) - - # 平多 - def close_long(self, entity_id, current_price, current_timestamp, order_amount=0, order_pct=1.0, order_price=0, - order_money=0): - self.order(entity_id, current_price, current_timestamp, order_amount, order_pct, order_price, - order_type=ORDER_TYPE_CLOSE_LONG, order_money=order_money) - - # 平空 - def close_short(self, entity_id, current_price, current_timestamp, order_amount=0, order_pct=1.0, order_price=0, - order_money=0): - self.order(entity_id, current_price, current_timestamp, order_amount, order_pct, order_price, - order_type=ORDER_TYPE_CLOSE_SHORT, order_money=order_money) - - @staticmethod - def trading_signal_to_order_type(trading_signal_type): - if trading_signal_type == TradingSignalType.open_long: - return ORDER_TYPE_LONG - if trading_signal_type == TradingSignalType.open_short: - return ORDER_TYPE_SHORT - if trading_signal_type == TradingSignalType.close_long: - return ORDER_TYPE_CLOSE_LONG - if trading_signal_type == TradingSignalType.close_short: - return ORDER_TYPE_CLOSE_SHORT - - -class SimAccountService(AccountService): - - def __init__(self, - entity_schema: EntityMixin, - trader_name, - timestamp, - provider=None, - level=IntervalLevel.LEVEL_1DAY, - base_capital=1000000, - buy_cost=0.001, - sell_cost=0.001, - slippage=0.001, - rich_mode=True, - adjust_type: AdjustType = None, - keep_history=False, - real_time=False, - kdata_use_begin_time=False): - self.entity_schema = entity_schema - self.base_capital = base_capital - self.buy_cost = buy_cost - self.sell_cost = sell_cost - self.slippage = slippage - self.rich_mode = rich_mode - self.adjust_type = adjust_type - 
self.trader_name = trader_name - - self.session = get_db_session('zvt', data_schema=TraderInfo) - self.provider = provider - self.level = level - self.start_timestamp = timestamp - self.keep_history = keep_history - self.real_time = real_time - self.kdata_use_begin_time = kdata_use_begin_time - - self.account = None - self.account = self.init_account() - - account_info = f'init_account,holding size:{len(self.account.positions)} profit:{self.account.profit} input_money:{self.account.input_money} ' \ - f'cash:{self.account.cash} value:{self.account.value} all_value:{self.account.all_value}' - self.logger.info(account_info) - - def input_money(self, money=1000000): - self.account.input_money += money - self.account.cash += money - - def clear_account(self): - trader_info = get_trader_info(session=self.session, trader_name=self.trader_name, return_type='domain', - limit=1) - - if trader_info: - self.logger.warning("trader:{} has run before,old result would be deleted".format(self.trader_name)) - clear_trader(session=self.session, trader_name=self.trader_name) - - def init_account(self) -> AccountStats: - # 清除历史数据 - if not self.keep_history: - self.clear_account() - - # 读取之前保存的账户 - if self.keep_history: - self.account = self.load_account() - if self.account: - return self.account - - # init trader info - entity_type = self.entity_schema.__name__.lower() - sim_account = TraderInfo(id=self.trader_name, - entity_id=f'trader_zvt_{self.trader_name}', - timestamp=self.start_timestamp, - trader_name=self.trader_name, - entity_type=entity_type, - start_timestamp=self.start_timestamp, - provider=self.provider, - level=self.level.value, - real_time=self.real_time, - kdata_use_begin_time=self.kdata_use_begin_time, - kdata_adjust_type=self.adjust_type.value) - self.session.add(sim_account) - self.session.commit() - - return AccountStats(entity_id=f'trader_zvt_{self.trader_name}', - timestamp=self.start_timestamp, - trader_name=self.trader_name, - cash=self.base_capital, - 
input_money=self.base_capital, - all_value=self.base_capital, - value=0, - closing=False) - - def load_account(self) -> AccountStats: - records = AccountStats.query_data(filters=[AccountStats.trader_name == self.trader_name], - order=AccountStats.timestamp.desc(), limit=1, return_type='domain') - if not records: - return self.account - latest_record: AccountStats = records[0] - - # create new orm object from latest record - account_dict = account_stats_schema.dump(latest_record) - del account_dict['id'] - del account_dict['positions'] - account = AccountStats() - fill_domain_from_dict(account, account_dict) - - positions: List[Position] = [] - for position_domain in latest_record.positions: - position_dict = position_schema.dump(position_domain) - self.logger.debug('current position:{}'.format(position_dict)) - del position_dict['id'] - del position_dict['account_stats'] - position = Position() - fill_domain_from_dict(position, position_dict) - positions.append(position) - - account.positions = positions - - return account - - def on_trading_open(self, timestamp): - self.logger.info('on_trading_open:{}'.format(timestamp)) - if is_same_date(timestamp, self.start_timestamp): - return - self.account = self.load_account() - - def on_trading_error(self, timestamp, error): - pass - - def on_trading_finish(self, timestamp): - pass - - def on_trading_signals(self, trading_signals: List[TradingSignal]): - for trading_signal in trading_signals: - try: - self.on_trading_signal(trading_signal) - except Exception as e: - self.logger.exception(e) - self.on_trading_error(timestamp=trading_signal.happen_timestamp, error=e) - - def on_trading_signal(self, trading_signal: TradingSignal): - entity_id = trading_signal.entity_id - happen_timestamp = trading_signal.happen_timestamp - order_type = AccountService.trading_signal_to_order_type(trading_signal.trading_signal_type) - trading_level = trading_signal.trading_level.value - if order_type: - try: - kdata = 
get_kdata(provider=self.provider, entity_id=entity_id, level=trading_level, - start_timestamp=happen_timestamp, end_timestamp=happen_timestamp, - limit=1, adjust_type=self.adjust_type) - except Exception as e: - self.logger.error(e) - raise WrongKdataError("could not get kdata") - - if pd_is_not_null(kdata): - entity_type, _, _ = decode_entity_id(kdata['entity_id'][0]) - - the_price = kdata['close'][0] - - if the_price: - self.order(entity_id=entity_id, current_price=the_price, - current_timestamp=happen_timestamp, order_pct=trading_signal.position_pct, - order_money=trading_signal.order_money, - order_type=order_type) - else: - self.logger.warning( - 'ignore trading signal,wrong kdata,entity_id:{},timestamp:{},kdata:{}'.format(entity_id, - happen_timestamp, - kdata.to_dict( - orient='records'))) - - else: - self.logger.warning( - 'ignore trading signal,could not get kdata,entity_id:{},timestamp:{}'.format(entity_id, - happen_timestamp)) - - def on_trading_close(self, timestamp): - self.logger.info('on_trading_close:{}'.format(timestamp)) - # remove the empty position - self.account.positions = [position for position in self.account.positions if - position.long_amount > 0 or position.short_amount > 0] - - # clear the data which need recomputing - the_id = '{}_{}'.format(self.trader_name, to_time_str(timestamp, TIME_FORMAT_ISO8601)) - - self.account.value = 0 - self.account.all_value = 0 - for position in self.account.positions: - entity_type, _, _ = decode_entity_id(position.entity_id) - data_schema = get_kdata_schema(entity_type, level=IntervalLevel.LEVEL_1DAY, adjust_type=self.adjust_type) - - kdata = get_kdata(provider=self.provider, level=IntervalLevel.LEVEL_1DAY, entity_id=position.entity_id, - order=data_schema.timestamp.desc(), - end_timestamp=timestamp, limit=1, adjust_type=self.adjust_type) - - closing_price = kdata['close'][0] - - position.available_long = position.long_amount - position.available_short = position.short_amount - - if closing_price: - if 
(position.long_amount is not None) and position.long_amount > 0: - position.value = position.long_amount * closing_price - self.account.value += position.value - elif (position.short_amount is not None) and position.short_amount > 0: - position.value = 2 * (position.short_amount * position.average_short_price) - position.value -= position.short_amount * closing_price - self.account.value += position.value - - # refresh profit - position.profit = (closing_price - position.average_long_price) \ - * position.long_amount - position.profit_rate = position.profit / ( - position.average_long_price * position.long_amount) - - else: - self.logger.warning( - 'could not refresh close value for position:{},timestamp:{}'.format(position.entity_id, - timestamp)) - - position.id = '{}_{}_{}'.format(self.trader_name, position.entity_id, - to_time_str(timestamp, TIME_FORMAT_ISO8601)) - position.timestamp = to_pd_timestamp(timestamp) - position.account_stats_id = the_id - - self.account.id = the_id - self.account.all_value = self.account.value + self.account.cash - self.account.closing = True - self.account.timestamp = to_pd_timestamp(timestamp) - self.account.profit = (self.account.all_value - self.account.input_money) / self.account.input_money - - self.session.add(self.account) - self.session.commit() - account_info = f'on_trading_close,holding size:{len(self.account.positions)} profit:{self.account.profit} input_money:{self.account.input_money} ' \ - f'cash:{self.account.cash} value:{self.account.value} all_value:{self.account.all_value}' - self.logger.info(account_info) - - def get_current_position(self, entity_id) -> Position: - """ - get current position to decide whether order could make - - :param entity_id: - :type entity_id: str - :return: - :rtype: None - """ - for position in self.account.positions: - if position.entity_id == entity_id: - return position - return None - - def get_current_account(self): - return self.account - - def update_position(self, 
current_position, order_amount, current_price, order_type, timestamp): - """ - - :param timestamp: - :type timestamp: - :param current_position: - :type current_position: Position - :param order_amount: - :type order_amount: - :param current_price: - :type current_price: - :param order_type: - :type order_type: - """ - if order_type == ORDER_TYPE_LONG: - need_money = (order_amount * current_price) * (1 + self.slippage + self.buy_cost) - if self.account.cash < need_money: - if self.rich_mode: - self.input_money() - else: - raise NotEnoughMoneyError() - - self.account.cash -= need_money - - # 计算平均价 - long_amount = current_position.long_amount + order_amount - if long_amount == 0: - current_position.average_long_price = 0 - current_position.average_long_price = (current_position.average_long_price * current_position.long_amount - + current_price * order_amount) / long_amount - - current_position.long_amount = long_amount - - if current_position.trading_t == 0: - current_position.available_long += order_amount - - elif order_type == ORDER_TYPE_SHORT: - need_money = (order_amount * current_price) * (1 + self.slippage + self.buy_cost) - if self.account.cash < need_money: - if self.rich_mode: - self.input_money() - else: - raise NotEnoughMoneyError() - - self.account.cash -= need_money - - short_amount = current_position.short_amount + order_amount - current_position.average_short_price = (current_position.average_short_price * current_position.short_amount - + current_price * order_amount) / short_amount - - current_position.short_amount = short_amount - - if current_position.trading_t == 0: - current_position.available_short += order_amount - - elif order_type == ORDER_TYPE_CLOSE_LONG: - self.account.cash += (order_amount * current_price * (1 - self.slippage - self.sell_cost)) - # FIXME:如果没卖完,重新计算计算平均价 - - current_position.available_long -= order_amount - current_position.long_amount -= order_amount - - elif order_type == ORDER_TYPE_CLOSE_SHORT: - self.account.cash += 2 
* (order_amount * current_position.average_short_price) - self.account.cash -= order_amount * current_price * (1 + self.slippage + self.sell_cost) - - current_position.available_short -= order_amount - current_position.short_amount -= order_amount - - # save the order info to db - order_id = '{}_{}_{}_{}'.format(self.trader_name, order_type, current_position.entity_id, - to_time_str(timestamp, TIME_FORMAT_ISO8601)) - order = Order(id=order_id, - timestamp=to_pd_timestamp(timestamp), - trader_name=self.trader_name, - entity_id=current_position.entity_id, - order_price=current_price, - order_amount=order_amount, - order_type=order_type, - level=self.level.value, - status='success') - self.session.add(order) - self.session.commit() - - def order(self, entity_id, current_price, current_timestamp, order_amount=0, order_pct=1.0, order_price=0, - order_type=ORDER_TYPE_LONG, order_money=0): - """ - 下单 - - Parameters - ---------- - entity_id : str - 交易标的id - - current_price : float - 当前价格 - - current_timestamp: timestamp - 下单的时间 - - order_amount : int - 数量 - - order_pct : float - 使用可用现金(仓位)的百分比,0.0-1.0 - - order_price : float - 用于限价交易 - - order_type : {ORDER_TYPE_LONG,ORDER_TYPE_SHORT,ORDER_TYPE_CLOSE_LONG,ORDER_TYPE_CLOSE_SHORT} - 交易类型 - - Returns - - """ - - # 市价交易,就是买卖是"当时"并"一定"能成交的 - # 简单起见,目前只支持这种方式 - if order_price == 0: - current_position = self.get_current_position(entity_id=entity_id) - - if not current_position: - trading_t = self.entity_schema.get_trading_t() - current_position = Position(trader_name=self.trader_name, - entity_id=entity_id, - long_amount=0, - available_long=0, - average_long_price=0, - short_amount=0, - available_short=0, - average_short_price=0, - profit=0, - value=0, - trading_t=trading_t - ) - # add it to latest account - self.account.positions.append(current_position) - - # 按钱交易 - if order_money > 0: - # 开多 - if order_type == ORDER_TYPE_LONG: - if current_position.short_amount > 0: - raise InvalidOrderError("close the short position before 
open long") - - if order_money > self.account.cash: - if self.rich_mode: - self.input_money() - else: - raise NotEnoughMoneyError() - - cost = current_price * (1 + self.slippage + self.buy_cost) - # 买的数量 - order_amount = order_money // cost - - if order_amount < 1: - self.logger.error( - f'invalid order_money:{order_money}, cost:{cost}, order_amount:{order_amount}') - return - - self.update_position(current_position, order_amount, current_price, order_type, - current_timestamp) - # 开空 - elif order_type == ORDER_TYPE_SHORT: - if current_position.long_amount > 0: - raise InvalidOrderError("close the long position before open short") - - if order_money > self.account.cash: - if self.rich_mode: - self.input_money() - else: - raise NotEnoughMoneyError() - - cost = current_price * (1 + self.slippage + self.buy_cost) - - order_amount = order_money // cost - - if order_amount < 1: - self.logger.error( - f'invalid order_money:{order_money}, cost:{cost}, order_amount:{order_amount}') - return - self.update_position(current_position, order_amount, current_price, order_type, - current_timestamp) - else: - raise InvalidOrderParamError('close long/short not support order_money') - - # 按数量交易 - elif order_amount > 0: - # 开多 - if order_type == ORDER_TYPE_LONG: - if current_position.short_amount > 0: - raise InvalidOrderError("close the short position before open long") - - self.update_position(current_position, order_amount, current_price, order_type, current_timestamp) - # 开空 - elif order_type == ORDER_TYPE_SHORT: - if current_position.long_amount > 0: - raise InvalidOrderError("close the long position before open short") - - self.update_position(current_position, order_amount, current_price, order_type, current_timestamp) - # 平多 - elif order_type == ORDER_TYPE_CLOSE_LONG: - if current_position.available_long >= order_amount: - self.update_position(current_position, order_amount, current_price, order_type, - current_timestamp) - else: - raise NotEnoughPositionError() - # 平空 - elif 
order_type == ORDER_TYPE_CLOSE_SHORT: - if current_position.available_short >= order_amount: - self.update_position(current_position, order_amount, current_price, order_type, - current_timestamp) - else: - raise Exception("not enough position") - - # 按仓位比例交易 - elif 0 < order_pct <= 1: - # 开多 - if order_type == ORDER_TYPE_LONG: - if current_position.short_amount > 0: - raise InvalidOrderError("close the short position before open long") - - cost = current_price * (1 + self.slippage + self.buy_cost) - want_pay = self.account.cash * order_pct - # 买的数量 - order_amount = want_pay // cost - - if order_amount < 1: - if self.rich_mode: - self.input_money() - order_amount = max((self.account.cash * order_pct) // cost, 1) - else: - raise NotEnoughMoneyError() - self.update_position(current_position, order_amount, current_price, order_type, - current_timestamp) - # 开空 - elif order_type == ORDER_TYPE_SHORT: - if current_position.long_amount > 0: - raise InvalidOrderError("close the long position before open short") - - cost = current_price * (1 + self.slippage + self.buy_cost) - want_pay = self.account.cash * order_pct - - order_amount = want_pay // cost - - if order_amount < 1: - if self.rich_mode: - self.input_money() - order_amount = max((self.account.cash * order_pct) // cost, 1) - else: - raise NotEnoughMoneyError() - - self.update_position(current_position, order_amount, current_price, order_type, - current_timestamp) - - # 平多 - elif order_type == ORDER_TYPE_CLOSE_LONG: - if current_position.available_long > 0: - if order_pct == 1.0: - order_amount = current_position.available_long - else: - order_amount = math.floor(current_position.available_long * order_pct) - - if order_amount != 0: - self.update_position(current_position, order_amount, current_price, order_type, - current_timestamp) - else: - self.logger.warning( - f'{entity_id} available_long:{current_position.available_long} order_pct:{order_pct} order_amount:{order_amount}') - else: - raise 
NotEnoughPositionError() - # 平空 - elif order_type == ORDER_TYPE_CLOSE_SHORT: - if current_position.available_short > 0: - if order_pct == 1.0: - order_amount = current_position.available_short - else: - order_amount = math.floor(current_position.available_short * order_pct) - - if order_amount != 0: - self.update_position(current_position, order_amount, current_price, order_type, - current_timestamp) - else: - self.logger.warning( - f'{entity_id} available_long:{current_position.available_long} order_pct:{order_pct} order_amount:{order_amount}') - else: - raise Exception("not enough position") - - -# the __all__ is generated -__all__ = ['AccountDayStatsSchema', 'PositionSchema', 'AccountService', 'SimAccountService'] diff --git a/zvt/trader/trader.py b/zvt/trader/trader.py deleted file mode 100644 index f9c7ad6f..00000000 --- a/zvt/trader/trader.py +++ /dev/null @@ -1,497 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import time -from typing import List, Union, Type, Tuple - -import pandas as pd - -from zvt.api.trader_info_api import AccountStatsReader -from zvt.contract import IntervalLevel, EntityMixin, AdjustType -from zvt.contract.drawer import Drawer -from zvt.contract.normal_data import NormalData -from zvt.domain import Stock, AccountStats, Position -from zvt.factors.target_selector import TargetSelector -from zvt.trader import TradingSignal, TradingSignalType, TradingListener -from zvt.trader.account import SimAccountService -from zvt.utils.time_utils import to_pd_timestamp, now_pd_timestamp, to_time_str, is_same_date - - -class Trader(object): - entity_schema: Type[EntityMixin] = None - - def __init__(self, - entity_ids: List[str] = None, - exchanges: List[str] = None, - codes: List[str] = None, - start_timestamp: Union[str, pd.Timestamp] = None, - end_timestamp: Union[str, pd.Timestamp] = None, - provider: str = None, - level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, - trader_name: str = None, - real_time: bool = False, - 
kdata_use_begin_time: bool = False, - draw_result: bool = True, - rich_mode: bool = False, - adjust_type: AdjustType = None, - profit_threshold=(3, -0.3), - keep_history=False) -> None: - assert self.entity_schema is not None - assert start_timestamp is not None - assert end_timestamp is not None - - self.logger = logging.getLogger(__name__) - - if trader_name: - self.trader_name = trader_name - else: - self.trader_name = type(self).__name__.lower() - - self.entity_ids = entity_ids - self.exchanges = exchanges - self.codes = codes - self.provider = provider - # make sure the min level selector correspond to the provider and level - self.level = IntervalLevel(level) - self.real_time = real_time - self.start_timestamp = to_pd_timestamp(start_timestamp) - self.end_timestamp = to_pd_timestamp(end_timestamp) - - self.trading_dates = self.entity_schema.get_trading_dates(start_date=self.start_timestamp, - end_date=self.end_timestamp) - - if real_time: - self.logger.info( - 'real_time mode, end_timestamp should be future,you could set it big enough for running forever') - assert self.end_timestamp >= now_pd_timestamp() - - self.kdata_use_begin_time = kdata_use_begin_time - self.draw_result = draw_result - self.rich_mode = rich_mode - - if type(adjust_type) is str: - adjust_type = AdjustType(adjust_type) - self.adjust_type = adjust_type - self.profit_threshold = profit_threshold - self.keep_history = keep_history - - self.level_map_long_targets = {} - self.level_map_short_targets = {} - self.trading_signals: List[TradingSignal] = [] - self.trading_signal_listeners: List[TradingListener] = [] - self.selectors: List[TargetSelector] = [] - - self.account_service = SimAccountService(entity_schema=self.entity_schema, - trader_name=self.trader_name, - timestamp=self.start_timestamp, - provider=self.provider, - level=self.level, - rich_mode=self.rich_mode, - adjust_type=self.adjust_type, - keep_history=self.keep_history) - - 
self.register_trading_signal_listener(self.account_service) - - self.init_selectors(entity_ids=self.entity_ids, entity_schema=self.entity_schema, exchanges=self.exchanges, - codes=self.codes, start_timestamp=self.start_timestamp, end_timestamp=self.end_timestamp, - adjust_type=self.adjust_type) - - if self.selectors: - self.trading_level_asc = list(set([IntervalLevel(selector.level) for selector in self.selectors])) - self.trading_level_asc.sort() - - self.logger.info(f'trader level:{self.level},selectors level:{self.trading_level_asc}') - - if self.level != self.trading_level_asc[0]: - raise Exception("trader level should be the min of the selectors") - - self.trading_level_desc = list(self.trading_level_asc) - self.trading_level_desc.reverse() - - # run selectors for history data at first - for selector in self.selectors: - selector.run() - - self.on_start() - - def on_start(self): - self.logger.info(f'trader:{self.trader_name} on_start') - - def init_selectors(self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, - adjust_type=None): - """ - overwrite it to init selectors if you want to use selector/factor computing model - :param adjust_type: - - """ - pass - - def update_targets_by_level(self, level: IntervalLevel, long_targets: List[str], - short_targets: List[str], ) -> None: - """ - the trading signals is generated in min level,before that,we should cache targets of all levels - - :param level: - :param long_targets: - :param short_targets: - """ - self.logger.debug( - f'level:{level},old long targets:{self.level_map_long_targets.get(level)},new long targets:{long_targets}') - self.level_map_long_targets[level] = long_targets - - self.logger.debug( - f'level:{level},old short targets:{self.level_map_short_targets.get(level)},new short targets:{short_targets}') - self.level_map_short_targets[level] = short_targets - - def get_long_targets_by_level(self, level: IntervalLevel) -> List[str]: - return 
self.level_map_long_targets.get(level) - - def get_short_targets_by_level(self, level: IntervalLevel) -> List[str]: - return self.level_map_short_targets.get(level) - - def on_targets_selected_from_levels(self, timestamp) -> Tuple[List[str], List[str]]: - """ - this method's called in every min level cycle to select targets in all levels generated by the previous cycle - the default implementation is selecting the targets in all levels - overwrite it for your custom logic - - :param timestamp: current event time - :return: long targets, short targets - """ - - long_selected = None - - short_selected = None - - for level in self.trading_level_desc: - long_targets = self.level_map_long_targets.get(level) - # long must in all - if long_targets: - long_targets = set(long_targets) - if long_selected is None: - long_selected = long_targets - else: - long_selected = long_selected & long_targets - else: - long_selected = set() - - short_targets = self.level_map_short_targets.get(level) - # short any - if short_targets: - short_targets = set(short_targets) - if short_selected is None: - short_selected = short_targets - else: - short_selected = short_selected | short_targets - - return long_selected, short_selected - - def get_current_account(self) -> AccountStats: - return self.account_service.get_current_account() - - def get_current_positions(self) -> List[Position]: - return self.get_current_account().positions - - def long_position_control(self): - positions = self.get_current_positions() - - position_pct = 1.0 - if not positions: - # 没有仓位,买2成 - position_pct = 0.2 - elif len(positions) <= 10: - # 小于10个持仓,买5成 - position_pct = 0.5 - - # 买完 - return position_pct - - def short_position_control(self): - # 卖完 - return 1.0 - - def on_profit_control(self): - if self.profit_threshold and self.get_current_positions(): - positive = self.profit_threshold[0] - negative = self.profit_threshold[1] - close_long_entity_ids = [] - for position in self.get_current_positions(): - if 
position.available_long > 1: - # 止盈 - if position.profit_rate >= positive: - close_long_entity_ids.append(position.entity_id) - self.logger.info(f'close profit {position.profit_rate} for {position.entity_id}') - # 止损 - if position.profit_rate <= negative: - close_long_entity_ids.append(position.entity_id) - self.logger.info(f'cut lost {position.profit_rate} for {position.entity_id}') - - return close_long_entity_ids, None - return None, None - - def buy(self, due_timestamp, happen_timestamp, entity_ids, ignore_in_position=True): - if ignore_in_position: - account = self.get_current_account() - current_holdings = [] - if account.positions: - current_holdings = [position.entity_id for position in account.positions if position != None and - position.available_long > 0] - - entity_ids = set(entity_ids) - set(current_holdings) - - if entity_ids: - position_pct = self.long_position_control() - position_pct = (1.0 / len(entity_ids)) * position_pct - - for entity_id in entity_ids: - trading_signal = TradingSignal(entity_id=entity_id, - due_timestamp=due_timestamp, - happen_timestamp=happen_timestamp, - trading_signal_type=TradingSignalType.open_long, - trading_level=self.level, - position_pct=position_pct) - self.trading_signals.append(trading_signal) - - def sell(self, due_timestamp, happen_timestamp, entity_ids): - # current position - account = self.get_current_account() - current_holdings = [] - if account.positions: - current_holdings = [position.entity_id for position in account.positions if position != None and - position.available_long > 0] - - shorted = set(current_holdings) & set(entity_ids) - - if shorted: - position_pct = self.short_position_control() - - for entity_id in shorted: - trading_signal = TradingSignal(entity_id=entity_id, - due_timestamp=due_timestamp, - happen_timestamp=happen_timestamp, - trading_signal_type=TradingSignalType.close_long, - trading_level=self.level, - position_pct=position_pct) - self.trading_signals.append(trading_signal) - - def 
trade_the_targets(self, due_timestamp, happen_timestamp, long_selected, short_selected): - if short_selected: - self.sell(due_timestamp=due_timestamp, happen_timestamp=happen_timestamp, entity_ids=short_selected) - if long_selected: - self.buy(due_timestamp=due_timestamp, happen_timestamp=happen_timestamp, entity_ids=long_selected) - - def on_finish(self, timestmap): - self.on_trading_finish(timestmap) - # show the result - if self.draw_result: - import plotly.io as pio - pio.renderers.default = "browser" - reader = AccountStatsReader(trader_names=[self.trader_name]) - df = reader.data_df - drawer = Drawer(main_data=NormalData(df.copy()[['trader_name', 'timestamp', 'all_value']], - category_field='trader_name')) - drawer.draw_line(show=True) - - def on_targets_filtered(self, timestamp, level, selector: TargetSelector, long_targets: List[str], - short_targets: List[str]) -> Tuple[List[str], List[str]]: - """ - overwrite it to filter the targets from selector - - :param timestamp: the event time - :param level: the level - :param selector: the selector - :param long_targets: the long targets from the selector - :param short_targets: the short targets from the selector - :return: filtered long targets, filtered short targets - """ - self.logger.info(f'on_targets_filtered {level} long:{long_targets}') - - if len(long_targets) > 10: - long_targets = long_targets[0:10] - self.logger.info(f'on_targets_filtered {level} filtered long:{long_targets}') - - return long_targets, short_targets - - def in_trading_date(self, timestamp): - return to_time_str(timestamp) in self.trading_dates - - def on_time(self, timestamp: pd.Timestamp): - """ - called in every min level cycle - - :param timestamp: event time - """ - self.logger.debug(f'current timestamp:{timestamp}') - - def on_trading_signals(self, trading_signals: List[TradingSignal]): - for l in self.trading_signal_listeners: - l.on_trading_signals(trading_signals) - - def on_trading_signal(self, trading_signal: TradingSignal): 
- for l in self.trading_signal_listeners: - try: - l.on_trading_signal(trading_signal) - except Exception as e: - self.logger.exception(e) - l.on_trading_error(timestamp=trading_signal.happen_timestamp, error=e) - - def on_trading_open(self, timestamp): - for l in self.trading_signal_listeners: - l.on_trading_open(timestamp) - - def on_trading_close(self, timestamp): - for l in self.trading_signal_listeners: - l.on_trading_close(timestamp) - - def on_trading_finish(self, timestamp): - for l in self.trading_signal_listeners: - l.on_trading_finish(timestamp) - - def on_trading_error(self, timestamp, error): - for l in self.trading_signal_listeners: - l.on_trading_error(timestamp, error) - - def run(self): - # iterate timestamp of the min level,e.g,9:30,9:35,9.40...for 5min level - # timestamp represents the timestamp in kdata - for timestamp in self.entity_schema.get_interval_timestamps(start_date=self.start_timestamp, - end_date=self.end_timestamp, level=self.level): - - if not self.in_trading_date(timestamp=timestamp): - continue - - waiting_seconds = 0 - - if self.level == IntervalLevel.LEVEL_1DAY: - if is_same_date(timestamp, now_pd_timestamp()): - while True: - self.logger.info(f'time is:{now_pd_timestamp()},just smoke for minutes') - time.sleep(60) - current = now_pd_timestamp() - if current.hour >= 19: - waiting_seconds = 20 - break - - elif self.real_time: - # all selector move on to handle the coming data - if self.kdata_use_begin_time: - real_end_timestamp = timestamp + pd.Timedelta(seconds=self.level.to_second()) - else: - real_end_timestamp = timestamp - - seconds = (now_pd_timestamp() - real_end_timestamp).total_seconds() - waiting_seconds = self.level.to_second() - seconds - - # meaning the future kdata not ready yet,we could move on to check - if waiting_seconds > 0: - # iterate the selector from min to max which in finished timestamp kdata - for level in self.trading_level_asc: - if self.entity_schema.is_finished_kdata_timestamp(timestamp=timestamp, 
level=level): - for selector in self.selectors: - if selector.level == level: - selector.move_on(timestamp, self.kdata_use_begin_time, timeout=waiting_seconds + 20) - - # on_trading_open to setup the account - if self.level >= IntervalLevel.LEVEL_1DAY or ( - self.level != IntervalLevel.LEVEL_1DAY and self.entity_schema.is_open_timestamp(timestamp)): - self.on_trading_open(timestamp) - - self.on_time(timestamp=timestamp) - - # 一般来说selector(factors)计算 多标的 历史数据比较快,多级别的计算也比较方便,常用于全市场标的粗过滤 - # 更细节的控制可以在on_targets_filtered里进一步处理 - # 也可以在on_time里面设计一些自己的逻辑配合过滤 - if self.selectors: - # 多级别的遍历算法要点: - # 1)计算各级别的 标的,通过 on_targets_filtered 过滤,缓存在level_map_long_targets,level_map_short_targets - # 2)在最小的level通过 on_targets_selected_from_levels 根据多级别的缓存标的,生成最终的选中标的 - # 这里需要注意的是,小级别拿到上一个周期的大级别的标的,这是合理的 - for level in self.trading_level_asc: - # in every cycle, all level selector do its job in its time - if self.entity_schema.is_finished_kdata_timestamp(timestamp=timestamp, level=level): - all_long_targets = [] - all_short_targets = [] - - # 从该level的selector中过滤targets - for selector in self.selectors: - if selector.level == level: - long_targets = selector.get_open_long_targets(timestamp=timestamp) - short_targets = selector.get_open_short_targets(timestamp=timestamp) - - if long_targets or short_targets: - long_targets, short_targets = self.on_targets_filtered(timestamp=timestamp, - level=level, - selector=selector, - long_targets=long_targets, - short_targets=short_targets) - - if long_targets: - all_long_targets += long_targets - if short_targets: - all_short_targets += short_targets - - # 将各级别的targets缓存在level_map_long_targets,level_map_short_targets - self.update_targets_by_level(level, all_long_targets, all_short_targets) - - # the time always move on by min level step and we could check all targets of levels - # 1)the targets is generated for next interval - # 2)the acceptable price is next interval prices,you could buy it at current price - # if the time is before the 
timestamp(due_timestamp) when trading signal received - # 3)the suggest price the the close price for generating the signal(happen_timestamp) - due_timestamp = timestamp + pd.Timedelta(seconds=self.level.to_second()) - - # 在最小level生成最终的 交易信号 - if level == self.level: - long_selected, short_selected = self.on_targets_selected_from_levels(timestamp) - - # 处理 止赢 止损 - passive_short, _ = self.on_profit_control() - if passive_short: - if not short_selected: - short_selected = passive_short - else: - short_selected = list(set(short_selected) | set(passive_short)) - - self.logger.debug('timestamp:{},long_selected:{}'.format(due_timestamp, long_selected)) - self.logger.debug('timestamp:{},short_selected:{}'.format(due_timestamp, short_selected)) - - if long_selected or short_selected: - self.trade_the_targets(due_timestamp=due_timestamp, happen_timestamp=timestamp, - long_selected=long_selected, short_selected=short_selected) - - if self.trading_signals: - self.on_trading_signals(self.trading_signals) - # clear - self.trading_signals = [] - - # on_trading_close to calculate date account - if self.level >= IntervalLevel.LEVEL_1DAY or ( - self.level != IntervalLevel.LEVEL_1DAY and self.entity_schema.is_close_timestamp(timestamp)): - self.on_trading_close(timestamp) - - self.on_finish(timestamp) - - def register_trading_signal_listener(self, listener): - if listener not in self.trading_signal_listeners: - self.trading_signal_listeners.append(listener) - - def deregister_trading_signal_listener(self, listener): - if listener in self.trading_signal_listeners: - self.trading_signal_listeners.remove(listener) - - -class StockTrader(Trader): - entity_schema = Stock - - def __init__(self, entity_ids: List[str] = None, exchanges: List[str] = None, codes: List[str] = None, - start_timestamp: Union[str, pd.Timestamp] = None, end_timestamp: Union[str, pd.Timestamp] = None, - provider: str = None, level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, - trader_name: str = None, 
real_time: bool = False, kdata_use_begin_time: bool = False, - draw_result: bool = True, rich_mode: bool = False, adjust_type: AdjustType = AdjustType.hfq, - profit_threshold=(3, -0.3), keep_history=False) -> None: - super().__init__(entity_ids, exchanges, codes, start_timestamp, end_timestamp, provider, level, trader_name, - real_time, kdata_use_begin_time, draw_result, rich_mode, adjust_type, profit_threshold, - keep_history) - - -# the __all__ is generated -__all__ = ['Trader', 'StockTrader'] diff --git a/zvt/ui/apps/factor_app.py b/zvt/ui/apps/factor_app.py deleted file mode 100644 index e5e1340d..00000000 --- a/zvt/ui/apps/factor_app.py +++ /dev/null @@ -1,300 +0,0 @@ -# -*- coding: utf-8 -*- -from typing import List - -import dash_core_components as dcc -import dash_daq as daq -import dash_html_components as html -from dash import dash -from dash.dependencies import Input, Output, State - -from zvt.api.trader_info_api import AccountStatsReader, OrderReader, get_order_securities -from zvt.api.trader_info_api import get_trader_info -from zvt.contract import Mixin -from zvt.contract import zvt_context, IntervalLevel -from zvt.contract.api import get_entities, get_schema_by_name, get_schema_columns -from zvt.contract.drawer import StackedDrawer -from zvt.domain import TraderInfo -from zvt.ui import zvt_app -from zvt.ui.components.dcc_components import get_account_stats_figure -from zvt.utils import pd_is_not_null - -account_readers = [] -order_readers = [] - -# init the data -traders: List[TraderInfo] = [] - -trader_names: List[str] = [] - - -def order_type_flag(order_type): - if order_type == 'order_long' or order_type == 'order_close_short': - return 'B' - else: - return 'S' - - -def order_type_color(order_type): - if order_type == 'order_long' or order_type == 'order_close_short': - return "#ec0000" - else: - return "#00da3c" - - -def load_traders(): - global traders - global trader_names - - traders = get_trader_info(return_type='domain') - 
account_readers.clear() - order_readers.clear() - for trader in traders: - account_readers.append(AccountStatsReader(trader_names=[trader.trader_name], level=trader.level)) - order_readers.append( - OrderReader(trader_names=[trader.trader_name], level=trader.level, start_timestamp=trader.start_timestamp)) - - trader_names = [item.trader_name for item in traders] - - -load_traders() - - -def factor_layout(): - layout = html.Div( - [ - # controls - html.Div( - className="three columns card", - children=[ - html.Div( - className="bg-white user-control", - children=[ - html.Div( - className="padding-top-bot", - children=[ - html.H6("select trader:"), - dcc.Dropdown(id='trader-selector', - placeholder='select the trader', - options=[{'label': item, 'value': i} for i, item in - enumerate(trader_names)] - ), - ], - ), - - # select entity_type - html.Div( - className="padding-top-bot", - children=[ - html.H6("select entity type:"), - dcc.Dropdown(id='entity-type-selector', - placeholder='select entity type', - options=[{'label': name, 'value': name} for name in - zvt_context.entity_schema_map.keys()], - value='stock', - clearable=False) - ], - ), - - # select entity - html.Div( - className="padding-top-bot", - children=[ - html.H6("select entity:"), - dcc.Dropdown(id='entity-selector', - placeholder='select entity') - ], - ), - # select levels - html.Div( - className="padding-top-bot", - children=[ - html.H6("select levels:"), - dcc.Dropdown( - id='levels-selector', - options=[{'label': level.name, 'value': level.value} for level in - (IntervalLevel.LEVEL_1WEEK, IntervalLevel.LEVEL_1DAY)], - value='1d', - multi=True - ) - ], - ), - # select factor - html.Div( - className="padding-top-bot", - children=[ - html.H6("select factor:"), - dcc.Dropdown(id='factor-selector', - placeholder='select factor', - options=[{'label': name, 'value': name} for name in - zvt_context.factor_cls_registry.keys()], - value='TechnicalFactor') - ] - ), - # select data - html.Div( - children=[ - 
html.Div( - [ - html.H6("related/all data to show in sub graph", - style={"display": "inline-block"}), - daq.BooleanSwitch( - id='data-switch', - on=True, - style={"display": "inline-block", - "float": "right", - "vertical-align": "middle", - "padding": "8px"} - ), - ], - ), - dcc.Dropdown(id='data-selector', placeholder='schema') - ], - style={"padding-top": "12px"} - ), - # select properties - html.Div( - children=[ - dcc.Dropdown(id='schema-column-selector', placeholder='properties') - ], - style={"padding-top": "6px"} - ), - - ]) - ]), - # Graph - html.Div( - className="nine columns card-left", - children=[ - html.Div( - id='trader-details', - className="bg-white", - ), - html.Div( - id='factor-details' - ) - ]) - ] - ) - - return layout - - -@zvt_app.callback( - [Output('trader-details', 'children'), - Output('entity-type-selector', 'options'), - Output('entity-selector', 'options')], - [Input('trader-selector', 'value'), Input('entity-type-selector', 'value')]) -def update_trader_details(trader_index, entity_type): - if trader_index is not None: - # change entity_type options - entity_type = traders[trader_index].entity_type - if not entity_type: - entity_type = 'stock' - entity_type_options = [{'label': entity_type, 'value': entity_type}] - - # account stats - account_stats = get_account_stats_figure(account_stats_reader=account_readers[trader_index]) - - # entities - entity_ids = get_order_securities(trader_name=trader_names[trader_index]) - df = get_entities(entity_type=entity_type, entity_ids=entity_ids, columns=['entity_id', 'code', 'name'], - index='entity_id') - entity_options = [{'label': f'{entity_id}({entity["name"]})', 'value': entity_id} for entity_id, entity in - df.iterrows()] - - return account_stats, entity_type_options, entity_options - else: - entity_type_options = [{'label': name, 'value': name} for name in zvt_context.entity_schema_map.keys()] - account_stats = None - df = get_entities(entity_type=entity_type, columns=['entity_id', 'code', 
'name'], index='entity_id') - entity_options = [{'label': f'{entity_id}({entity["name"]})', 'value': entity_id} for entity_id, entity in - df.iterrows()] - return account_stats, entity_type_options, entity_options - - -@zvt_app.callback( - Output('data-selector', 'options'), - [Input('entity-type-selector', 'value'), Input('data-switch', 'on')]) -def update_entity_selector(entity_type, related): - if entity_type is not None: - if related: - schemas = zvt_context.entity_map_schemas.get(entity_type) - else: - schemas = zvt_context.schemas - return [{'label': schema.__name__, 'value': schema.__name__} for schema in schemas] - raise dash.PreventUpdate() - - -@zvt_app.callback( - Output('schema-column-selector', 'options'), - [Input('data-selector', 'value')]) -def update_column_selector(schema_name): - if schema_name: - schema = get_schema_by_name(name=schema_name) - cols = get_schema_columns(schema=schema) - - return [{'label': col, 'value': col} for col in cols] - raise dash.PreventUpdate() - - -@zvt_app.callback( - Output('factor-details', 'children'), - [Input('factor-selector', 'value'), - Input('entity-type-selector', 'value'), - Input('entity-selector', 'value'), - Input('levels-selector', 'value'), - Input('schema-column-selector', 'value')], - state=[State('trader-selector', 'value'), State('data-selector', 'value')]) -def update_factor_details(factor, entity_type, entity, levels, columns, trader_index, schema_name): - if factor and entity_type and entity and levels: - sub_df = None - # add sub graph - if columns: - if type(columns) == str: - columns = [columns] - columns = columns + ['entity_id', 'timestamp'] - schema: Mixin = get_schema_by_name(name=schema_name) - sub_df = schema.query_data(entity_id=entity, columns=columns) - - # add trading signals as annotation - annotation_df = None - if trader_index is not None: - order_reader = order_readers[trader_index] - annotation_df = order_reader.data_df.copy() - annotation_df = 
annotation_df[annotation_df.entity_id == entity].copy() - if pd_is_not_null(annotation_df): - annotation_df['value'] = annotation_df['order_price'] - annotation_df['flag'] = annotation_df['order_type'].apply(lambda x: order_type_flag(x)) - annotation_df['color'] = annotation_df['order_type'].apply(lambda x: order_type_color(x)) - print(annotation_df.tail()) - - if type(levels) is list and len(levels) >= 2: - levels.sort() - drawers = [] - for level in levels: - drawers.append(zvt_context.factor_cls_registry[factor]( - entity_schema=zvt_context.entity_schema_map[entity_type], - level=level, entity_ids=[entity]).drawer()) - stacked = StackedDrawer(*drawers) - - return dcc.Graph( - id=f'{factor}-{entity_type}-{entity}', - figure=stacked.draw_kline(show=False, height=900)) - else: - if type(levels) is list: - level = levels[0] - else: - level = levels - drawer = zvt_context.factor_cls_registry[factor](entity_schema=zvt_context.entity_schema_map[entity_type], - level=level, - entity_ids=[entity], - need_persist=False).drawer() - if pd_is_not_null(sub_df): - drawer.add_sub_df(sub_df) - if pd_is_not_null(annotation_df): - drawer.annotation_df = annotation_df - - return dcc.Graph( - id=f'{factor}-{entity_type}-{entity}', - figure=drawer.draw_kline(show=False, height=800)) - raise dash.PreventUpdate() diff --git a/zvt/ui/assets/base.css b/zvt/ui/assets/base.css deleted file mode 100644 index 4c1fa9ff..00000000 --- a/zvt/ui/assets/base.css +++ /dev/null @@ -1,414 +0,0 @@ -/* Table of contents -–––––––––––––––––––––––––––––––––––––––––––––––––– -- Plotly.js -- Grid -- Base Styles -- Typography -- Links -- Buttons -- Forms -- Lists -- Code -- Tables -- Spacing -- Utilities -- Clearing -- Media Queries -*/ - -/* PLotly.js -–––––––––––––––––––––––––––––––––––––––––––––––––– */ -/* plotly.js's modebar's z-index is 1001 by default - * https://github.com/plotly/plotly.js/blob/7e4d8ab164258f6bd48be56589dacd9bdd7fded2/src/css/_modebar.scss#L5 - * In case a dropdown is above the 
graph, the dropdown's options - * will be rendered below the modebar - * Increase the select option's z-index - */ - -/* This was actually not quite right - - dropdowns were overlapping each other (edited October 26) - -.Select { - z-index: 1002; -}*/ - -/* Grid -–––––––––––––––––––––––––––––––––––––––––––––––––– */ -.container { - position: relative; - width: 100%; - max-width: 960px; - margin: 0 auto; - padding: 0 20px; - box-sizing: border-box; } -.column, -.columns { - width: 100%; - float: left; - box-sizing: border-box; } - -/* For devices larger than 400px */ -@media (min-width: 400px) { - .container { - width: 85%; - padding: 0; } -} - -/* For devices larger than 550px */ -@media (min-width: 550px) { - .container { - width: 80%; } - .column, - .columns { - margin-left: 4%; } - .column:first-child, - .columns:first-child { - margin-left: 0; } - - .one.column, - .one.columns { width: 4.66666666667%; } - .two.columns { width: 13.3333333333%; } - .three.columns { width: 22%; } - .four.columns { width: 30.6666666667%; } - .five.columns { width: 39.3333333333%; } - .six.columns { width: 48%; } - .seven.columns { width: 56.6666666667%; } - .eight.columns { width: 65.3333333333%; } - .nine.columns { width: 74.0%; } - .ten.columns { width: 82.6666666667%; } - .eleven.columns { width: 91.3333333333%; } - .twelve.columns { width: 100%; margin-left: 0; } - - .one-third.column { width: 30.6666666667%; } - .two-thirds.column { width: 65.3333333333%; } - - .one-half.column { width: 48%; } - - /* Offsets */ - .offset-by-one.column, - .offset-by-one.columns { margin-left: 8.66666666667%; } - .offset-by-two.column, - .offset-by-two.columns { margin-left: 17.3333333333%; } - .offset-by-three.column, - .offset-by-three.columns { margin-left: 26%; } - .offset-by-four.column, - .offset-by-four.columns { margin-left: 34.6666666667%; } - .offset-by-five.column, - .offset-by-five.columns { margin-left: 43.3333333333%; } - .offset-by-six.column, - .offset-by-six.columns { 
margin-left: 52%; } - .offset-by-seven.column, - .offset-by-seven.columns { margin-left: 60.6666666667%; } - .offset-by-eight.column, - .offset-by-eight.columns { margin-left: 69.3333333333%; } - .offset-by-nine.column, - .offset-by-nine.columns { margin-left: 78.0%; } - .offset-by-ten.column, - .offset-by-ten.columns { margin-left: 86.6666666667%; } - .offset-by-eleven.column, - .offset-by-eleven.columns { margin-left: 95.3333333333%; } - - .offset-by-one-third.column, - .offset-by-one-third.columns { margin-left: 34.6666666667%; } - .offset-by-two-thirds.column, - .offset-by-two-thirds.columns { margin-left: 69.3333333333%; } - - .offset-by-one-half.column, - .offset-by-one-half.columns { margin-left: 52%; } - -} - - -/* Base Styles -–––––––––––––––––––––––––––––––––––––––––––––––––– */ -/* NOTE -html is set to 62.5% so that all the REM measurements throughout Skeleton -are based on 10px sizing. So basically 1.5rem = 15px :) */ -html { - font-size: 62.5%; } -body { - font-size: 1.5em; /* currently ems cause chrome bug misinterpreting rems on body element */ - line-height: 1.6; - font-weight: 400; - font-family: "Open Sans", "HelveticaNeue", "Helvetica Neue", Helvetica, Arial, sans-serif; - color: rgb(50, 50, 50); } - - -/* Typography -–––––––––––––––––––––––––––––––––––––––––––––––––– */ -h1, h2, h3, h4, h5, h6 { - margin-top: 0; - margin-bottom: 0; - font-weight: 300; } -h1 { font-size: 4.5rem; line-height: 1.2; letter-spacing: -.1rem; margin-bottom: 2rem; } -h2 { font-size: 3.6rem; line-height: 1.25; letter-spacing: -.1rem; margin-bottom: 1.8rem; margin-top: 1.8rem;} -h3 { font-size: 3.0rem; line-height: 1.3; letter-spacing: -.1rem; margin-bottom: 1.5rem; margin-top: 1.5rem;} -h4 { font-size: 2.6rem; line-height: 1.35; letter-spacing: -.08rem; margin-bottom: 1.2rem; margin-top: 1.2rem;} -h5 { font-size: 2.2rem; line-height: 1.5; letter-spacing: -.05rem; margin-bottom: 0.6rem; margin-top: 0.6rem;} -h6 { font-size: 2.0rem; line-height: 1.6; letter-spacing: 0; 
margin-bottom: 0.75rem; margin-top: 0.75rem;} - -p { - margin-top: 0; } - - -/* Blockquotes -–––––––––––––––––––––––––––––––––––––––––––––––––– */ -blockquote { - border-left: 4px lightgrey solid; - padding-left: 1rem; - margin-top: 2rem; - margin-bottom: 2rem; - margin-left: 0rem; -} - - -/* Links -–––––––––––––––––––––––––––––––––––––––––––––––––– */ -a { - color: #1EAEDB; - text-decoration: underline; - cursor: pointer;} -a:hover { - color: #0FA0CE; } - - -/* Buttons -–––––––––––––––––––––––––––––––––––––––––––––––––– */ -.button, -button, -input[type="submit"], -input[type="reset"], -input[type="button"] { - display: inline-block; - height: 38px; - padding: 0 30px; - color: #555; - text-align: center; - font-size: 11px; - font-weight: 600; - line-height: 38px; - letter-spacing: .1rem; - text-transform: uppercase; - text-decoration: none; - white-space: nowrap; - background-color: transparent; - border-radius: 4px; - border: 1px solid #bbb; - cursor: pointer; - box-sizing: border-box; } -.button:hover, -button:hover, -input[type="submit"]:hover, -input[type="reset"]:hover, -input[type="button"]:hover, -.button:focus, -button:focus, -input[type="submit"]:focus, -input[type="reset"]:focus, -input[type="button"]:focus { - color: #333; - border-color: #888; - outline: 0; } -.button.button-primary, -button.button-primary, -input[type="submit"].button-primary, -input[type="reset"].button-primary, -input[type="button"].button-primary { - color: #FFF; - background-color: #33C3F0; - border-color: #33C3F0; } -.button.button-primary:hover, -button.button-primary:hover, -input[type="submit"].button-primary:hover, -input[type="reset"].button-primary:hover, -input[type="button"].button-primary:hover, -.button.button-primary:focus, -button.button-primary:focus, -input[type="submit"].button-primary:focus, -input[type="reset"].button-primary:focus, -input[type="button"].button-primary:focus { - color: #FFF; - background-color: #1EAEDB; - border-color: #1EAEDB; } - - -/* Forms 
-–––––––––––––––––––––––––––––––––––––––––––––––––– */ -input[type="email"], -input[type="number"], -input[type="search"], -input[type="text"], -input[type="tel"], -input[type="url"], -input[type="password"], -textarea, -select { - height: 38px; - padding: 6px 10px; /* The 6px vertically centers text on FF, ignored by Webkit */ - background-color: #fff; - border: 1px solid #D1D1D1; - border-radius: 4px; - box-shadow: none; - box-sizing: border-box; - font-family: inherit; - font-size: inherit; /*https://stackoverflow.com/questions/6080413/why-doesnt-input-inherit-the-font-from-body*/} -/* Removes awkward default styles on some inputs for iOS */ -input[type="email"], -input[type="number"], -input[type="search"], -input[type="text"], -input[type="tel"], -input[type="url"], -input[type="password"], -textarea { - -webkit-appearance: none; - -moz-appearance: none; - appearance: none; } -textarea { - min-height: 65px; - padding-top: 6px; - padding-bottom: 6px; } -input[type="email"]:focus, -input[type="number"]:focus, -input[type="search"]:focus, -input[type="text"]:focus, -input[type="tel"]:focus, -input[type="url"]:focus, -input[type="password"]:focus, -textarea:focus, -select:focus { - border: 1px solid #33C3F0; - outline: 0; } -label, -legend { - display: block; - margin-bottom: 0px; } -fieldset { - padding: 0; - border-width: 0; } -input[type="checkbox"], -input[type="radio"] { - display: inline; } -label > .label-body { - display: inline-block; - margin-left: .5rem; - font-weight: normal; } - - -/* Lists -–––––––––––––––––––––––––––––––––––––––––––––––––– */ -ul { - list-style: circle inside; } -ol { - list-style: decimal inside; } -ol, ul { - padding-left: 0; - margin-top: 0; } -ul ul, -ul ol, -ol ol, -ol ul { - margin: 1.5rem 0 1.5rem 3rem; - font-size: 90%; } -li { - margin-bottom: 1rem; } - - -/* Tables -–––––––––––––––––––––––––––––––––––––––––––––––––– */ -table { - border-collapse: collapse; -} -th, -td { - padding: 12px 15px; - text-align: left; - 
border-bottom: 1px solid #E1E1E1; } -th:first-child, -td:first-child { - padding-left: 0; } -th:last-child, -td:last-child { - padding-right: 0; } - - -/* Spacing -–––––––––––––––––––––––––––––––––––––––––––––––––– */ -button, -.button { - margin-bottom: 0rem; } -input, -textarea, -select, -fieldset { - margin-bottom: 0rem; } -pre, -dl, -figure, -table, -form { - margin-bottom: 0rem; } -p, -ul, -ol { - margin-bottom: 0.75rem; } - -/* Utilities -–––––––––––––––––––––––––––––––––––––––––––––––––– */ -.u-full-width { - width: 100%; - box-sizing: border-box; } -.u-max-full-width { - max-width: 100%; - box-sizing: border-box; } -.u-pull-right { - float: right; } -.u-pull-left { - float: left; } - - -/* Misc -–––––––––––––––––––––––––––––––––––––––––––––––––– */ -hr { - margin-top: 3rem; - margin-bottom: 3.5rem; - border-width: 0; - border-top: 1px solid #E1E1E1; } - - -/* Clearing -–––––––––––––––––––––––––––––––––––––––––––––––––– */ - -/* Self Clearing Goodness */ -.container:after, -.row:after, -.u-cf { - content: ""; - display: table; - clear: both; } - - -/* Media Queries -–––––––––––––––––––––––––––––––––––––––––––––––––– */ -/* -Note: The best way to structure the use of media queries is to create the queries -near the relevant code. For example, if you wanted to change the styles for buttons -on small devices, paste the mobile query code up in the buttons section and style it -there. 
-*/ - - -/* Larger than mobile */ -@media (min-width: 400px) {} - -/* Larger than phablet (also point when grid becomes active) */ -@media (min-width: 550px) {} - -/* Larger than tablet */ -@media (min-width: 750px) {} - -/* Larger than desktop */ -@media (min-width: 1000px) {} - -/* Larger than Desktop HD */ -@media (min-width: 1200px) {} \ No newline at end of file diff --git a/zvt/ui/components/__init__.py b/zvt/ui/components/__init__.py deleted file mode 100644 index 7c68785e..00000000 --- a/zvt/ui/components/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- \ No newline at end of file diff --git a/zvt/ui/components/dcc_components.py b/zvt/ui/components/dcc_components.py deleted file mode 100644 index 41733521..00000000 --- a/zvt/ui/components/dcc_components.py +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- - -import dash_core_components as dcc - -from zvt.api.quote import decode_entity_id, get_kdata_schema -from zvt.api.trader_info_api import OrderReader, AccountStatsReader -from zvt.contract.drawer import Drawer -from zvt.contract.reader import DataReader -from zvt.contract.zvt_context import entity_schema_map -from zvt.utils.pd_utils import pd_is_not_null - - -def order_type_color(order_type): - if order_type == 'order_long' or order_type == 'order_close_short': - return "#ec0000" - else: - return "#00da3c" - - -def order_type_flag(order_type): - if order_type == 'order_long' or order_type == 'order_close_short': - return 'B' - else: - return 'S' - - -def get_trading_signals_figure(order_reader: OrderReader, - entity_id: str, - start_timestamp=None, - end_timestamp=None, - adjust_type=None): - entity_type, _, _ = decode_entity_id(entity_id) - - data_schema = get_kdata_schema(entity_type=entity_type, level=order_reader.level, adjust_type=adjust_type) - if not start_timestamp: - start_timestamp = order_reader.start_timestamp - if not end_timestamp: - end_timestamp = order_reader.end_timestamp - kdata_reader = 
DataReader(entity_ids=[entity_id], data_schema=data_schema, - entity_schema=entity_schema_map.get(entity_type), - start_timestamp=start_timestamp, - end_timestamp=end_timestamp, - level=order_reader.level) - - # generate the annotation df - order_reader.move_on(timeout=0) - df = order_reader.data_df.copy() - df = df[df.entity_id == entity_id].copy() - if pd_is_not_null(df): - df['value'] = df['order_price'] - df['flag'] = df['order_type'].apply(lambda x: order_type_flag(x)) - df['color'] = df['order_type'].apply(lambda x: order_type_color(x)) - print(df.tail()) - - drawer = Drawer(main_df=kdata_reader.data_df, annotation_df=df) - return drawer.draw_kline(show=False, height=800) - - -def get_account_stats_figure(account_stats_reader: AccountStatsReader): - graph_list = [] - - # 账户统计曲线 - if account_stats_reader: - fig = account_stats_reader.draw_line(show=False) - - for trader_name in account_stats_reader.trader_names: - graph_list.append(dcc.Graph( - id='{}-account'.format(trader_name), - figure=fig)) - - return graph_list diff --git a/zvt/utils/__init__.py b/zvt/utils/__init__.py deleted file mode 100644 index dfa96dc8..00000000 --- a/zvt/utils/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -# the __all__ is generated -__all__ = [] - -# __init__.py structure: -# common code of the package -# export interface in __all__ which contains __all__ of its sub modules - -# import all from submodule decorator -from .decorator import * -from .decorator import __all__ as _decorator_all -__all__ += _decorator_all - -# import all from submodule time_utils -from .time_utils import * -from .time_utils import __all__ as _time_utils_all -__all__ += _time_utils_all - -# import all from submodule utils -from .utils import * -from .utils import __all__ as _utils_all -__all__ += _utils_all - -# import all from submodule zip_utils -from .zip_utils import * -from .zip_utils import __all__ as _zip_utils_all -__all__ += _zip_utils_all - -# import all from submodule 
pd_utils -from .pd_utils import * -from .pd_utils import __all__ as _pd_utils_all -__all__ += _pd_utils_all \ No newline at end of file diff --git a/zvt/utils/decorator.py b/zvt/utils/decorator.py deleted file mode 100644 index 904bc565..00000000 --- a/zvt/utils/decorator.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -def to_string(cls): - def __str__(self): - return '%s(%s)' % ( - type(self).__name__, - ', '.join('%s=%s' % item for item in vars(self).items()) - ) - - cls.__str__ = __str__ - return cls -# the __all__ is generated -__all__ = ['to_string'] \ No newline at end of file diff --git a/zvt/utils/git_utils.py b/zvt/utils/git_utils.py deleted file mode 100644 index 691a5c46..00000000 --- a/zvt/utils/git_utils.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -import subprocess - - -def get_git_user_name(): - try: - return subprocess.check_output(['git', 'config', '--get', 'user.name']).decode('utf8').strip() - except: - return "foolcage" - - -def get_git_user_email(): - try: - return subprocess.check_output(['git', 'config', '--get', 'user.email']).decode('utf8').strip() - except: - return "" diff --git a/zvt/utils/pd_utils.py b/zvt/utils/pd_utils.py deleted file mode 100644 index b25dcab1..00000000 --- a/zvt/utils/pd_utils.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- coding: utf-8 -*- -from typing import List, Union - -import pandas as pd - - -def pd_is_not_null(df: Union[pd.DataFrame, pd.Series]): - return df is not None and not df.empty - - -def index_df(df, index='timestamp', inplace=True, drop=False, time_field='timestamp'): - if time_field: - df[time_field] = pd.to_datetime(df[time_field]) - - if inplace: - df.set_index(index, drop=drop, inplace=inplace) - else: - df = df.set_index(index, drop=drop, inplace=inplace) - - if type(index) == str: - df = df.sort_index() - elif type(index) == list: - df.index.names = index - level = list(range(len(index))) - df = df.sort_index(level=level) - return df - - -def normal_index_df(df, 
category_field='entity_id', time_filed='timestamp', drop=True): - index = [category_field, time_filed] - if is_normal_df(df): - return df - - return index_df(df=df, index=index, drop=drop, time_field='timestamp') - - -def is_normal_df(df, category_field='entity_id', time_filed='timestamp'): - if pd_is_not_null(df): - names = df.index.names - - if len(names) == 2 and names[0] == category_field and names[1] == time_filed: - return True - - return False - - -def df_subset(df, columns=None): - if columns: - return df.loc[:, columns] - return df - - -def fill_with_same_index(df_list: List[pd.DataFrame]): - idx = None - for df in df_list: - if idx is None: - idx = df.index - else: - idx = idx.append(df.index).drop_duplicates() - idx = idx.sort_values() - - result = [] - for df in df_list: - # print(df[df.index.duplicated()]) - added_index = idx.difference(df.index.drop_duplicates()) - added_df = pd.DataFrame(index=added_index, columns=df.columns) - - # df1 = df.reindex(idx) - df1 = df.append(added_df) - df1 = df1.sort_index() - result.append(df1) - return result -# the __all__ is generated -__all__ = ['pd_is_not_null', 'index_df', 'normal_index_df', 'is_normal_df', 'df_subset', 'fill_with_same_index'] \ No newline at end of file diff --git a/zvt/utils/time_utils.py b/zvt/utils/time_utils.py deleted file mode 100644 index 9b18fcb2..00000000 --- a/zvt/utils/time_utils.py +++ /dev/null @@ -1,199 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -import math - -import arrow -import pandas as pd -import tzlocal - -from zvt.contract import IntervalLevel - -CHINA_TZ = 'Asia/Shanghai' - -TIME_FORMAT_ISO8601 = "YYYY-MM-DDTHH:mm:ss.SSS" - -TIME_FORMAT_DAY = 'YYYY-MM-DD' - -TIME_FORMAT_DAY1 = 'YYYYMMDD' - -TIME_FORMAT_MINUTE = 'YYYYMMDDHHmm' - -TIME_FORMAT_MINUTE1 = 'HH:mm' - -TIME_FORMAT_MINUTE2 = "YYYY-MM-DD HH:mm:ss" - - -# ms(int) or second(float) or str -def to_pd_timestamp(the_time) -> pd.Timestamp: - if the_time is None: - return None - if type(the_time) == int: - return 
pd.Timestamp.fromtimestamp(the_time / 1000) - - if type(the_time) == float: - return pd.Timestamp.fromtimestamp(the_time) - - return pd.Timestamp(the_time) - - -def to_timestamp(the_time): - return int(to_pd_timestamp(the_time).tz_localize(tzlocal.get_localzone()).timestamp() * 1000) - - -def now_timestamp(): - return int(pd.Timestamp.utcnow().timestamp() * 1000) - - -def now_pd_timestamp() -> pd.Timestamp: - return pd.Timestamp.now() - - -def today() -> pd.Timestamp: - return pd.Timestamp.today() - - -def to_time_str(the_time, fmt=TIME_FORMAT_DAY): - try: - return arrow.get(to_pd_timestamp(the_time)).format(fmt) - except Exception as e: - return the_time - - -def now_time_str(fmt=TIME_FORMAT_DAY): - return to_time_str(the_time=now_pd_timestamp(), fmt=fmt) - - -def next_date(the_time, days=1): - return to_pd_timestamp(the_time) + datetime.timedelta(days=days) - - -def is_same_date(one, two): - return to_pd_timestamp(one).date() == to_pd_timestamp(two).date() - - -def is_same_time(one, two): - return to_timestamp(one) == to_timestamp(two) - - -def get_year_quarter(time): - time = to_pd_timestamp(time) - return time.year, ((time.month - 1) // 3) + 1 - - -def day_offset_today(offset=0): - return now_pd_timestamp() + datetime.timedelta(days=offset) - - -def get_year_quarters(start, end=pd.Timestamp.now()): - start_year_quarter = get_year_quarter(start) - current_year_quarter = get_year_quarter(end) - if current_year_quarter[0] == start_year_quarter[0]: - return [(current_year_quarter[0], x) for x in range(start_year_quarter[1], current_year_quarter[1] + 1)] - elif current_year_quarter[0] - start_year_quarter[0] == 1: - return [(start_year_quarter[0], x) for x in range(start_year_quarter[1], 5)] + \ - [(current_year_quarter[0], x) for x in range(1, current_year_quarter[1] + 1)] - elif current_year_quarter[0] - start_year_quarter[0] > 1: - return [(start_year_quarter[0], x) for x in range(start_year_quarter[1], 5)] + \ - [(x, y) for x in range(start_year_quarter[0] + 1, 
current_year_quarter[0]) for y in range(1, 5)] + \ - [(current_year_quarter[0], x) for x in range(1, current_year_quarter[1] + 1)] - else: - raise Exception("wrong start time:{}".format(start)) - - -def date_and_time(the_date, the_time): - time_str = '{}T{}:00.000'.format(to_time_str(the_date), the_time) - - return to_pd_timestamp(time_str) - - -def next_timestamp(current_timestamp: pd.Timestamp, level: IntervalLevel) -> pd.Timestamp: - current_timestamp = to_pd_timestamp(current_timestamp) - return current_timestamp + pd.Timedelta(seconds=level.to_second()) - - -def evaluate_size_from_timestamp(start_timestamp, - level: IntervalLevel, - one_day_trading_minutes, - end_timestamp: pd.Timestamp = None): - """ - given from timestamp,level,one_day_trading_minutes,this func evaluate size of kdata to current. - it maybe a little bigger than the real size for fetching all the kdata. - - :param start_timestamp: - :type start_timestamp: pd.Timestamp - :param level: - :type level: IntervalLevel - :param one_day_trading_minutes: - :type one_day_trading_minutes: int - """ - if not end_timestamp: - end_timestamp = pd.Timestamp.now() - else: - end_timestamp = to_pd_timestamp(end_timestamp) - - time_delta = end_timestamp - to_pd_timestamp(start_timestamp) - - one_day_trading_seconds = one_day_trading_minutes * 60 - - if level == IntervalLevel.LEVEL_1DAY: - return time_delta.days + 1 - - if level == IntervalLevel.LEVEL_1WEEK: - return int(math.ceil(time_delta.days / 7)) + 1 - - if level == IntervalLevel.LEVEL_1MON: - return int(math.ceil(time_delta.days / 30)) + 1 - - if time_delta.days > 0: - seconds = (time_delta.days + 1) * one_day_trading_seconds - return int(math.ceil(seconds / level.to_second())) + 1 - else: - seconds = time_delta.total_seconds() - return min(int(math.ceil(seconds / level.to_second())) + 1, - one_day_trading_seconds / level.to_second() + 1) - - -def is_finished_kdata_timestamp(timestamp, level: IntervalLevel): - timestamp = to_pd_timestamp(timestamp) - if 
level.floor_timestamp(timestamp) == timestamp: - return True - return False - - -def is_in_same_interval(t1: pd.Timestamp, t2: pd.Timestamp, level: IntervalLevel): - t1 = to_pd_timestamp(t1) - t2 = to_pd_timestamp(t2) - if level == IntervalLevel.LEVEL_1WEEK: - return t1.week == t2.week - if level == IntervalLevel.LEVEL_1MON: - return t1.month == t2.month - - return level.floor_timestamp(t1) == level.floor_timestamp(t2) - - -def split_time_interval(start, end, method=None, interval=30, freq='D'): - start = to_pd_timestamp(start) - end = to_pd_timestamp(end) - if not method: - while start < end: - interval_end = min(next_date(start, interval), end) - yield pd.date_range(start=start, end=interval_end, freq=freq) - start = next_date(interval_end, 1) - - if method == 'month': - while start <= end: - import calendar - _, day = calendar.monthrange(start.year, start.month) - - interval_end = min(to_pd_timestamp(f'{start.year}-{start.month}-{day}'), end) - yield pd.date_range(start=start, end=interval_end, freq=freq) - start = next_date(interval_end, 1) - - -if __name__ == '__main__': - print(date_and_time('2019-10-01', '10:00')) -# the __all__ is generated -__all__ = ['to_pd_timestamp', 'to_timestamp', 'now_timestamp', 'now_pd_timestamp', 'to_time_str', 'now_time_str', - 'next_date', 'is_same_date', 'is_same_time', 'get_year_quarter', 'day_offset_today', 'get_year_quarters', - 'date_and_time', 'next_timestamp', 'evaluate_size_from_timestamp', 'is_finished_kdata_timestamp', - 'is_in_same_interval'] diff --git a/zvt/utils/utils.py b/zvt/utils/utils.py deleted file mode 100644 index e6a66c7d..00000000 --- a/zvt/utils/utils.py +++ /dev/null @@ -1,203 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import numbers -from decimal import * -from enum import Enum - -import pandas as pd - -from zvt.utils.time_utils import to_time_str - -getcontext().prec = 16 - -logger = logging.getLogger(__name__) - -none_values = ['不变', '--', '-', '新进'] -zero_values = ['不变', '--', '-', '新进'] - - 
-def first_item_to_float(the_list): - return to_float(the_list[0]) - - -def second_item_to_float(the_list): - return to_float(the_list[1]) - - -def add_func_to_value(the_map, the_func): - for k, v in the_map.items(): - the_map[k] = (v, the_func) - return the_map - - -def to_float(the_str, default=None): - if not the_str: - return default - if the_str in none_values: - return None - - if '%' in the_str: - return pct_to_float(the_str) - try: - scale = 1.0 - if the_str[-2:] == '万亿': - the_str = the_str[0:-2] - scale = 1000000000000 - elif the_str[-1] == '亿': - the_str = the_str[0:-1] - scale = 100000000 - elif the_str[-1] == '万': - the_str = the_str[0:-1] - scale = 10000 - if not the_str: - return default - return float(Decimal(the_str.replace(',', '')) * Decimal(scale)) - except Exception as e: - logger.error('the_str:{}'.format(the_str)) - logger.exception(e) - return default - - -def pct_to_float(the_str, default=None): - if the_str in none_values: - return None - - try: - return float(Decimal(the_str.replace('%', '')) / Decimal(100)) - except Exception as e: - logger.exception(e) - return default - - -def json_callback_param(the_str): - json_str = the_str[the_str.index("(") + 1:the_str.index(")")].replace('null', 'None') - return eval(json_str) - - -def fill_domain_from_dict(the_domain, the_dict: dict, the_map: dict = None, default_func=lambda x: x): - """ - use field map and related func to fill properties from the dict to the domain - - - :param the_domain: - :type the_domain: DeclarativeMeta - :param the_dict: - :type the_dict: dict - :param the_map: - :type the_map: dict - :param default_func: - :type default_func: function - """ - if not the_map: - the_map = {} - for k in the_dict: - the_map[k] = (k, default_func) - - for k, v in the_map.items(): - if isinstance(v, tuple): - field_in_dict = v[0] - the_func = v[1] - else: - field_in_dict = v - the_func = default_func - - the_value = the_dict.get(field_in_dict) - if the_value is not None: - to_value = the_value 
- if to_value in none_values: - setattr(the_domain, k, None) - else: - result_value = the_func(to_value) - setattr(the_domain, k, result_value) - exec('the_domain.{}=result_value'.format(k)) - - -SUPPORT_ENCODINGS = ['GB2312', 'GBK', 'GB18030', 'UTF-8'] - - -def read_csv(f, encoding, sep=None, na_values=None): - encodings = [encoding] + SUPPORT_ENCODINGS - for encoding in encodings: - try: - if sep: - return pd.read_csv(f, sep=sep, encoding=encoding, na_values=na_values) - else: - return pd.read_csv(f, encoding=encoding, na_values=na_values) - except UnicodeDecodeError as e: - logger.warning('read_csv failed by using encoding:{}'.format(encoding), e) - f.seek(0) - continue - return None - - -def marshal_object_for_ui(object): - if isinstance(object, Enum): - return object.value - - if isinstance(object, pd.Timestamp): - return to_time_str(object) - - return object - - -def chrome_copy_header_to_dict(src): - lines = src.split('\n') - header = {} - if lines: - for line in lines: - try: - index = line.index(':') - key = line[:index] - value = line[index + 1:] - if key and value: - header.setdefault(key.strip(), value.strip()) - except Exception: - pass - return header - - -def to_positive_number(number): - if isinstance(number, numbers.Number): - return abs(number) - - return 0 - - -def multiple_number(number, factor): - try: - return number * factor - except: - return number - - -def add_to_map_list(the_map, key, value): - result = [] - if key in the_map: - result = the_map[key] - else: - the_map[key] = result - - if value not in result: - result.append(value) - - -def iterate_with_step(data, sub_size=100): - size = len(data) - if size >= sub_size: - step_count = int(size / sub_size) - if size % sub_size: - step_count = step_count + 1 - else: - step_count = 1 - - for step in range(step_count): - if type(data) == pd.DataFrame or type(data) == pd.Series: - yield data.iloc[sub_size * step:sub_size * (step + 1)] - else: - yield data[sub_size * step:sub_size * (step + 1)] 
- - -# the __all__ is generated -__all__ = ['first_item_to_float', 'second_item_to_float', 'add_func_to_value', 'to_float', 'pct_to_float', - 'json_callback_param', 'fill_domain_from_dict', 'read_csv', 'marshal_object_for_ui', - 'chrome_copy_header_to_dict', 'to_positive_number', 'multiple_number', 'add_to_map_list']