# """ | |
# The MIT License (MIT) | |
# Copyright (c) 2023 pkjmesra | |
# Permission is hereby granted, free of charge, to any person obtaining a copy | |
# of this software and associated documentation files (the "Software"), to deal | |
# in the Software without restriction, including without limitation the rights | |
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |
# copies of the Software, and to permit persons to whom the Software is | |
# furnished to do so, subject to the following conditions: | |
# The above copyright notice and this permission notice shall be included in all | |
# copies or substantial portions of the Software. | |
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |
# SOFTWARE. | |
# """ | |
name: 9. After-Market Data Gen | |
on: | |
workflow_dispatch: | |
inputs: | |
name: | |
description: 'Run Details' | |
required: false | |
default: 'After-Market Data Gen' | |
targetPythonVersion: | |
description: 'Target Python version: 3.9 , 3.10, 3.11 ?' | |
required: false | |
type: string | |
default: '3.11' | |
runson: | |
description: 'Runner OS. e.g: ubuntu-20.04, windows-latest or macos-latest' | |
required: false | |
default: 'ubuntu-20.04' | |
installtalib: | |
description: 'Is TA-Lib install required? (Y/N). N to skip installing talib.' | |
required: false | |
default: 'Y' | |
skipDownload: | |
description: 'Skip pkl download for 1y. Leave empty to NOT skip download. Y to skip download' | |
required: false | |
default: '' | |
cleanuphistoricalscans: | |
description: 'Clean up historical scans? Leave empty to run historical scans. Y to run clean up' | |
required: false | |
default: 'N' | |
scanOptions: | |
description: 'Scan options for historical scans. cleanuphistoricalscans MUST to be left empty to run historical scans.' | |
required: false | |
default: '--reScanForZeroSize --scanDaysInPast 3 -s2 42,0,21,22,29,M,Z -s1 W,N,E,M,Z,S,0,2,3,4,6,7,9,10,13,14,15 -s0 S,T,E,U,Z,F,H,Y,B,G,C,M,D,I,L,P -s3 "0" -s4 "0" --branchname actions-data-download --scans --triggerRemotely -f' | |
schedule: | |
- cron: '58 9 * * 0-6' | |
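# The cron above fires at 09:58 UTC every day (0-6 covers Sunday through Saturday).
# The workflow can also be started by hand; a hypothetical manual trigger via the
# GitHub CLI, overriding a few of the inputs defined above, would look like:
#   gh workflow run "9. After-Market Data Gen" --ref <branch> \
#     -f runson=ubuntu-20.04 -f installtalib=N -f skipDownload=Y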
run-name: ${{ inputs.name || 'After-Market Data Gen' }}
jobs:
  Download_Stock_Data:
    runs-on: ${{ inputs.runson || 'ubuntu-20.04' }} #ubuntu-20.04 #windows-latest
    permissions: write-all
    # actions: write
    outputs:
      should_clean: ${{ inputs.scanOptions }}
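      # Note: should_clean simply forwards the scanOptions input so that dependent
      # jobs (e.g. the commented-out Delete_Workflow_Run job at the bottom of this
      # file) can test whether a scan run was requested.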
    steps:
      - uses: actions/checkout@v4
        with:
          ref: actions-data-download
      # - name: Take pull from main
      #   shell: bash
      #   run: |
      #     git config user.name github-actions >/dev/null 2>&1
      #     git config user.email [email protected] >/dev/null 2>&1
      #     git config remote.origin.fetch '+refs/heads/*:refs/remotes/origin/*' >/dev/null 2>&1
      #     git remote update >/dev/null 2>&1
      #     git fetch >/dev/null 2>&1
      #     git config pull.rebase false >/dev/null 2>&1
      #     git pull origin main
      #     git push -v -u origin +actions-data-download >/dev/null 2>&1
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ inputs.targetPythonVersion || '3.11' }}
      - name: Restore Dependencies from Windows Cache
        if: startsWith(runner.os, 'Windows')
        uses: actions/cache@v4
        continue-on-error: true
        with:
          path: ~\AppData\Local\pip\Cache
          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Load Cache for Linux Dependencies
        uses: actions/cache@v4
        continue-on-error: true
        if: startsWith(runner.os, 'Linux')
        with:
          path: |
            /usr/include/ta-lib
            /usr/bin/ta-lib-config
            /usr/lib/libta_lib.la
            /usr/lib/libta_lib.a
          key: ${{ runner.os }}-talib
          restore-keys: |
            ${{ runner.os }}-talib
      - name: Install TA-Lib for Windows
        if: ${{ inputs.installtalib != 'N' && inputs.skipDownload == '' && startsWith(runner.os, 'Windows') }}
        shell: bash
        run: |
          python3 -m pip install --upgrade pip
          cd .github/dependencies/
          if [[ "${{ inputs.targetPythonVersion }}" == "" ]]; then
            pip3 install TA_Lib-0.4.29-cp311-cp311-win_amd64.whl
          elif [[ "${{ inputs.targetPythonVersion }}" == "3.9" ]]; then
            pip3 install TA_Lib-0.4.19-cp39-cp39-win_amd64.whl
          elif [[ "${{ inputs.targetPythonVersion }}" == "3.11" ]]; then
            pip3 install TA_Lib-0.4.29-cp311-cp311-win_amd64.whl
          elif [[ "${{ inputs.targetPythonVersion }}" == "3.12" ]]; then
            pip3 install TA_Lib-0.4.29-cp312-cp312-win_amd64.whl
          fi
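      # The branch above picks the prebuilt TA_Lib wheel that matches the requested
      # interpreter (cp39/cp311/cp312); an empty targetPythonVersion falls back to the
      # cp311 wheel.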
      - name: Install TA-Lib for Linux
        if: ${{ inputs.installtalib != 'N' && inputs.skipDownload == '' && startsWith(runner.os, 'Linux') }}
        run: |
          cd .github/dependencies/
          pwd
          tar -xzf ta-lib-0.4.0-src.tar.gz
          cd ta-lib/
          ./configure --prefix=/usr
          make
          sudo make install
          cd /home/runner/work/PKScreener/PKScreener/
          pip3 install ta-lib==0.4.32
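      # On Linux, TA-Lib is instead built from the vendored ta-lib-0.4.0 source tarball
      # and installed under /usr (the same paths restored by the "Load Cache for Linux
      # Dependencies" step above), after which the ta-lib Python binding is installed.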
      - name: Environment setup to get latest EXE
        if: ${{ inputs.skipDownload == '' }}
        shell: bash
        id: releaseurlsetter
        run: |
          pip3 install requests
          python3 .github/workflows/githubutilities.py -d
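      # githubutilities.py -d is expected to expose the latest release executable's URL
      # as this step's DOWNLOAD_URL output; the two download steps below read it from
      # steps.releaseurlsetter.outputs.DOWNLOAD_URL.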
      - name: Download latest executable on Windows
        shell: cmd
        if: ${{ inputs.skipDownload == '' && startsWith(runner.os, 'Windows') }}
        env:
          exe_path: ${{ steps.releaseurlsetter.outputs.DOWNLOAD_URL }}
        run: |
          curl -o pkscreenercli.exe -JL %exe_path%
      - name: Download latest executable on Ubuntu
        shell: bash
        if: ${{ inputs.skipDownload == '' && startsWith(runner.os, 'Linux') }}
        env:
          exe_path: ${{ steps.releaseurlsetter.outputs.DOWNLOAD_URL }}
        run: |
          curl -o /home/runner/work/PKScreener/PKScreener/pkscreenercli.bin -JL $exe_path
          chmod +x /home/runner/work/PKScreener/PKScreener/pkscreenercli.bin
      - name: Delete stock data
        shell: bash
        continue-on-error: true
        if: ${{ inputs.skipDownload == '' && startsWith(runner.os, 'Linux') }}
        run: |
          rm -rf actions-data-download/*stock_data_*.pkl || true
          rm -rf actions-data-download/*DB.pkl || true
      - name: Delete results stock data
        shell: bash
        continue-on-error: true
        if: ${{ inputs.skipDownload == '' && startsWith(runner.os, 'Linux') }}
        run: |
          rm -rf results/Data/*stock_data_*.pkl || true
          rm -rf results/Data/*DB.pkl || true
      - name: Remove deleted files from git
        shell: bash
        continue-on-error: true
        if: ${{ inputs.skipDownload == '' && startsWith(runner.os, 'Linux') }}
        run: |
          git ls-files --deleted -z | xargs -0 git rm
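      # The three Linux-only steps above delete previously committed pickle caches and
      # stage those deletions: git ls-files --deleted lists files missing from the
      # working tree, and xargs git rm records the removals in the index.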
      - name: Download 52 week Stock Data on Ubuntu
        shell: bash
        continue-on-error: true
        if: ${{ inputs.skipDownload == '' && startsWith(runner.os, 'Linux') }}
        run: |
          /home/runner/work/PKScreener/PKScreener/pkscreenercli.bin -d -a Y
          /home/runner/work/PKScreener/PKScreener/pkscreenercli.bin -d -a Y -i 1m
          rm -rf results/Data/*.lck || true
          ls -l results/Data
          cp results/Data/*.pkl actions-data-download
          ls -l actions-data-download
          git status
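      # pkscreenercli.bin is invoked twice: once for the regular daily data download
      # (-d -a Y) and once with -i 1m for 1-minute intraday data. Leftover .lck files
      # are removed and the generated pickles copied into actions-data-download so the
      # push step below can commit them.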
      - name: Download 52 week Stock Data on Windows
        continue-on-error: true
        if: ${{ inputs.skipDownload == '' && startsWith(runner.os, 'Windows') }}
        shell: cmd
        id: downloader
        run: |
          rmdir /s /q actions-data-download
          mkdir actions-data-download
          rmdir /s /q results
          mkdir results
          pkscreenercli.exe -d -a Y
        env:
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
      - name: Download Intraday Stock Data on Windows
        continue-on-error: true
        if: ${{ inputs.skipDownload == '' && startsWith(runner.os, 'Windows') }}
        shell: cmd
        id: intradaydownloader
        run: |
          pkscreenercli.exe -d -a Y -i 1m
        env:
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
      - name: Copy Stock Data Pickles on Windows
        continue-on-error: true
        if: ${{ inputs.skipDownload == '' && startsWith(runner.os, 'Windows') }}
        shell: cmd
        run: |
          copy results\*DB.pkl actions-data-download
          copy results\stock_data_*.pkl actions-data-download
      - name: Push Pickle Data for stock data
        if: ${{ inputs.skipDownload == '' }}
        continue-on-error: true
        shell: bash
        run: |
          git config user.name github-actions
          git config user.email [email protected]
          git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"
          git remote update
          git fetch
          git add actions-data-download/*.pkl --force
          git add results/Data/*.pkl --force
          git add results/Indices/*.csv --force
          git add results/Indices/*.csv -u
          git add results/Data/*.pkl -u
          git add actions-data-download/*.pkl -u
          git commit -m "GitHub-Action-Workflow-Market-Data-Download-(Default-Config)"
          git push -v -u origin +actions-data-download
        env:
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
      - name: Squash Commits (Python)
        if: ${{ inputs.skipDownload == '' }}
        shell: bash
        continue-on-error: true
        run: |
          git config user.name github-actions
          git config user.email [email protected]
          git fetch
          python3 .github/workflows/squash.py -b actions-data-download -m "GitHub-Action-Workflow-Market-Data-Download-(Default-Config)"
        env:
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
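      # The push step force-pushes the freshly generated pickles to the
      # actions-data-download branch; squash.py, given the same branch and commit
      # message, presumably squashes the accumulated data-download commits to keep
      # that branch's history compact.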
      - name: Install Python Dependencies for clean up
        if: ${{ inputs.cleanuphistoricalscans == 'Y' || inputs.skipDownload == '' }}
        continue-on-error: true
        run: |
          pip3 install --upgrade pip
          pip3 install -r requirements.txt
          pip3 install .
      - name: Configure env.dev file
        run: |
          python3 -c "import os; file = open('.env.dev', 'w'); file.write('CHAT_ID='+os.environ['CHAT_ID_PROD']+'\n'); file.write('TOKEN='+os.environ['TOKEN_DEV']+'\n'); file.write('chat_idADMIN='+os.environ['CHAT_IDADMIN_DEV']+'\n'); file.close()"
          python3 -c "import os; file = open('pkscreener/.env.dev', 'w'); file.write('CHAT_ID='+os.environ['CHAT_ID_PROD']+'\n'); file.write('TOKEN='+os.environ['TOKEN_DEV']+'\n'); file.write('chat_idADMIN='+os.environ['CHAT_IDADMIN_DEV']+'\n'); file.close()"
        shell: bash
        env:
          CHAT_ID_PROD: ${{secrets.CHAT_ID_PROD}}
          TOKEN_DEV: ${{secrets.TOKEN_DEV}}
          CHAT_IDADMIN_DEV: ${{secrets.CHAT_IDADMIN_DEV}}
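      # The two python3 -c one-liners write identical .env.dev files at the repo root
      # and under pkscreener/, filling CHAT_ID, TOKEN and chat_idADMIN from repository
      # secrets so later steps that need this configuration can read it.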
      - name: Trigger miscellaneous tasks that are download-data dependent
        if: ${{ inputs.skipDownload == '' }}
        shell: bash
        continue-on-error: true
        run: |
          python3 .github/workflows/workflowtriggers.py -f --misc
        env:
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
      - name: Clean up old historical scan results
        shell: bash
        if: ${{ inputs.cleanuphistoricalscans == 'Y' }}
        continue-on-error: true
        env:
          RUNNER: "LOCAL_RUN_SCANNER"
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
        run: |
          python3 .github/workflows/workflowtriggers.py -f --cleanuphistoricalscans --scanDaysInPast 450 -s0 S,T,E,U,Z,F,H,Y,G,M,D,I,L -s1 "" -s2 "" -s3 "" -s4 "" --branchname actions-data-download
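      # The clean-up path only runs when cleanuphistoricalscans is 'Y'; it calls
      # workflowtriggers.py with --cleanuphistoricalscans over a 450-day look-back
      # (--scanDaysInPast 450) for the listed -s0 scan codes on the
      # actions-data-download branch.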
      - name: Setup Python environment for triggers on Ubuntu
        shell: bash
        if: ${{ inputs.cleanuphistoricalscans != 'Y' && startsWith(runner.os, 'Linux') }}
        continue-on-error: true
        run: |
          cd .github/dependencies/
          pwd
          tar -xzf ta-lib-0.4.0-src.tar.gz
          cd ta-lib/
          ./configure --prefix=/usr
          make
          sudo make install
          cd /home/runner/work/PKScreener/PKScreener/
          python3 -m pip install --upgrade pip
          pip3 install -r requirements.txt
          pip3 install -r requirements-dev.txt
          pip3 install ta-lib==0.4.32
          pip3 install .
      - name: Setup Python environment for triggers on Windows
        continue-on-error: true
        if: ${{ inputs.cleanuphistoricalscans != 'Y' && startsWith(runner.os, 'Windows') }}
        shell: bash
        run: |
          cd .github/dependencies/
          if [[ "${{ inputs.targetPythonVersion }}" == "" ]]; then
            pip3 install TA_Lib-0.4.29-cp311-cp311-win_amd64.whl
          elif [[ "${{ inputs.targetPythonVersion }}" == "3.9" ]]; then
            pip3 install TA_Lib-0.4.19-cp39-cp39-win_amd64.whl
          elif [[ "${{ inputs.targetPythonVersion }}" == "3.11" ]]; then
            pip3 install TA_Lib-0.4.29-cp311-cp311-win_amd64.whl
          elif [[ "${{ inputs.targetPythonVersion }}" == "3.12" ]]; then
            pip3 install TA_Lib-0.4.29-cp312-cp312-win_amd64.whl
          fi
          cd ../..
          python3 -m pip install --upgrade pip
          pip3 install -r requirements.txt
          pip3 install -r requirements-dev.txt
          pip3 install .
      - name: Run Historical Scan on Ubuntu
        shell: bash
        continue-on-error: true
        env:
          RUNNER: "LOCAL_RUN_SCANNER"
          GSHEET_SERVICE_ACCOUNT_DEV: ${{secrets.GSHEET_SERVICE_ACCOUNT_DEV}}
          GSHEET_VIEWONLY_LINK: ${{secrets.GSHEET_VIEWONLY_LINK}}
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
        if: ${{ inputs.cleanuphistoricalscans != 'Y' && startsWith(runner.os, 'Linux') }}
        run: |
          python3 .github/workflows/workflowtriggers.py ${{ inputs.scanOptions }}
          echo "STAGED_COMMITS=$(git diff --cached --numstat | wc -l)" >> $GITHUB_OUTPUT
        id: histRunnerLinux
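      # STAGED_COMMITS counts the staged changes (git diff --cached) produced by the
      # historical scan on this runner and on the Windows runner below; the push step
      # that follows only runs when either count is non-zero.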
      - name: Run Historical Scan on Windows
        continue-on-error: true
        env:
          RUNNER: "LOCAL_RUN_SCANNER"
          GSHEET_SERVICE_ACCOUNT_DEV: ${{secrets.GSHEET_SERVICE_ACCOUNT_DEV}}
          GSHEET_VIEWONLY_LINK: ${{secrets.GSHEET_VIEWONLY_LINK}}
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
        if: ${{ inputs.cleanuphistoricalscans != 'Y' && startsWith(runner.os, 'Windows') }}
        shell: cmd
        run: |
          python3 .github/workflows/workflowtriggers.py ${{ inputs.scanOptions }}
          rem Count staged changes (cmd equivalent of: git diff --cached --numstat | wc -l)
          for /f %%c in ('git diff --cached --numstat ^| find /c /v ""') do >>%GITHUB_OUTPUT% echo STAGED_COMMITS=%%c
        id: histRunnerWindows
      - name: Push Historical scan Data
        shell: bash
        if: ${{ inputs.cleanuphistoricalscans != 'Y' && ( steps.histRunnerLinux.outputs.STAGED_COMMITS != 0 || steps.histRunnerWindows.outputs.STAGED_COMMITS != 0 ) }}
        run: |
          git config user.name github-actions
          git config user.email [email protected]
          git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"
          git remote update
          git fetch
          git add actions-data-scan/*.txt --force
          git commit -m "GitHub-Action-Workflow-Market-Data-Scan-Results-(Default-Config)"
          git push -v -u origin +actions-data-download
        env:
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
      - name: Delete this workflow run
        continue-on-error: true
        if: ${{ inputs.skipDownload == '' && startsWith(runner.os, 'Linux') }}
        shell: bash
        run: |
          gh run list --status success --limit 1000 --json databaseId -q '.[].databaseId' | xargs -IID gh api "repos/$(gh repo view --json nameWithOwner -q .nameWithOwner)/actions/runs/ID" -X DELETE
          gh run list --status cancelled --limit 1000 --json databaseId -q '.[].databaseId' | xargs -IID gh api "repos/$(gh repo view --json nameWithOwner -q .nameWithOwner)/actions/runs/ID" -X DELETE
        env:
          GH_TOKEN: ${{secrets.GITHUB_TOKEN}}
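      # Housekeeping: when skipDownload is left empty, the job finishes by deleting
      # previous successful and cancelled workflow runs through the GitHub API so the
      # Actions run history stays short.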
      # || '--reScanForZeroSize --scans --local --scanDaysInPast 3 -s2 0,22,29,42,50,M,Z -s0 S,T,E,U,Z,F,H,Y,B,G,C,M,D,I,L,P -s1 W,N,E,M,Z,S,0,2,3,4,6,7,9,10,13,14,15 -s3 "0" -s4 "0" --branchname actions-data-download -f' }}
  # Delete_Workflow_Run:
  #   runs-on: ubuntu-20.04
  #   needs: [Download_Stock_Data]
  #   if: ${{ needs.Download_Stock_Data.outputs.should_clean == ''}}
  #   name: Delete workflow run
  #   steps:
  #     - uses: actions/checkout@v4
  #       with:
  #         ref: main
  #     - name: Delete this workflow run
  #       if: startsWith(runner.os, 'Linux')
  #       shell: bash
  #       run: |
  #         gh run list --status success --limit 100 --json databaseId -q '.[].databaseId' | xargs -IID gh api "repos/$(gh repo view --json nameWithOwner -q .nameWithOwner)/actions/runs/ID" -X DELETE
  #         gh run list --status cancelled --limit 100 --json databaseId -q '.[].databaseId' | xargs -IID gh api "repos/$(gh repo view --json nameWithOwner -q .nameWithOwner)/actions/runs/ID" -X DELETE
  #         # for id in $(gh run list --limit 100 --jq ".[] | select (.status == \"success\" ) | .databaseId" --json databaseId,status); do gh run delete $id; done
  #         # for id in $(gh run list --limit 100 --jq ".[] | select (.status == \"cancelled\" ) | .databaseId" --json databaseId,status); do gh run delete $id; done
  #         # for id in $(gh run list --limit 100 --jq ".[] | select (.status == \"failure\" ) | .databaseId" --json databaseId,status); do gh run delete $id; done
  #         # Cancel runs queued or in_progress. See https://cli.github.com/manual/gh_run_list
  #         # for id in $(gh run list -w "13. Backtest for Generic" --limit 100 --jq ".[] | select (.status == \"queued\" ) | .databaseId" --json databaseId,status); do gh run cancel $id; done
  #       env:
  #         GH_TOKEN: ${{secrets.GITHUB_TOKEN}}