Merge pull request ohmplatform#114 from ohmplatform/FreedomGPT2.0
FreedomGPT2.0
bhattaraijay05 authored Aug 25, 2023
2 parents 458d810 + 18432d8 commit aac44d6
Showing 454 changed files with 17,273 additions and 6,998 deletions.
16 changes: 0 additions & 16 deletions .eslintrc.json

This file was deleted.

244 changes: 122 additions & 122 deletions .github/workflows/build.yml
@@ -37,7 +37,7 @@ jobs:
- name: Build
id: make_build
run: |
cd llama.cpp && make && cd ..
cd llama.cpp && LLAMA_METAL=1 make && cd ..
- name: Make directory
id: make_directory
@@ -66,124 +66,124 @@ jobs:
**/*.dmg
**/*.zip
build-macIntel:
name: Build Intel Mac
if: startsWith(github.ref, 'refs/tags/')
runs-on: macos-latest
strategy:
matrix:
os: [macos-latest]
steps:
- name: Checkout code with submodules
uses: actions/checkout@v2
with:
submodules: recursive

- name: Use node 18.x
uses: actions/setup-node@v1
with:
node-version: "18.x"

- name: Yarn install
run: yarn install

- name: Add MacOS certs
if: matrix.os == 'macos-latest' && startsWith(github.ref, 'refs/tags/')
run: chmod +x add-osx-cert.sh && ./add-osx-cert.sh
env:
CERTIFICATE_OSX_APPLICATION: ${{ secrets.CERTIFICATE_OSX_APPLICATION }}
CERTIFICATE_PASSWORD: ${{ secrets.CERTIFICATE_PASSWORD }}

- name: Build
id: make_build
run: |
cd llama.cpp && make && cd ..
- name: Make directory
id: make_directory
run: |
mkdir -p models/llama
- name: Copy mac
id: copy_mac
run: |
cp llama.cpp/main models/llama
- name: Make
if: startsWith(github.ref, 'refs/tags/')
run: yarn make
env:
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}

- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
env:
GITHUB_TOKEN: ${{ secrets.AUTH_TOKEN }}
with:
files: |
**/*.dmg
**/*.zip
!**/*.txt.zip
build-windows:
name: Build Windows
if: startsWith(github.ref, 'refs/tags/')
runs-on: windows-latest
strategy:
matrix:
os: [windows-latest]
steps:
- name: Checkout code with submodules
uses: actions/checkout@v2
with:
submodules: recursive

- name: Use node 18.x
uses: actions/setup-node@v1
with:
node-version: "18.x"

- name: Yarn install
run: yarn install

- name: Add Windows certificate
if: matrix.os == 'windows-latest' && startsWith(github.ref, 'refs/tags/')
id: write_file
uses: timheuer/base64-to-file@v1
with:
fileName: "win-certificate.pfx"
encodedString: ${{ secrets.CERTIFICATE_WINDOWS_PFX }}

- name: Build
id: make_build
run: |
cd llama.cpp && mkdir build && cd build && cmake .. && cmake --build . --config Release && cd ../..
- name: Make directory
id: make_directory
run: |
mkdir -p models/llama
- name: Copy windows
id: copy_windows
run: |
cp llama.cpp/build/bin/Release/main.exe models/llama
- name: Make
if: startsWith(github.ref, 'refs/tags/')
run: yarn make
env:
WINDOWS_PFX_FILE: ${{ steps.write_file.outputs.filePath }}
WINDOWS_PFX_PASSWORD: ${{ secrets.WINDOWS_PFX_PASSWORD }}

- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
env:
GITHUB_TOKEN: ${{ secrets.AUTH_TOKEN }}
with:
files: |
**/*Setup.exe
**/*.nupkg
# build-macIntel:
# name: Build Intel Mac
# if: startsWith(github.ref, 'refs/tags/')
# runs-on: macos-latest
# strategy:
# matrix:
# os: [macos-latest]
# steps:
# - name: Checkout code with submodules
# uses: actions/checkout@v2
# with:
# submodules: recursive

# - name: Use node 18.x
# uses: actions/setup-node@v1
# with:
# node-version: "18.x"

# - name: Yarn install
# run: yarn install

# - name: Add MacOS certs
# if: matrix.os == 'macos-latest' && startsWith(github.ref, 'refs/tags/')
# run: chmod +x add-osx-cert.sh && ./add-osx-cert.sh
# env:
# CERTIFICATE_OSX_APPLICATION: ${{ secrets.CERTIFICATE_OSX_APPLICATION }}
# CERTIFICATE_PASSWORD: ${{ secrets.CERTIFICATE_PASSWORD }}

# - name: Build
# id: make_build
# run: |
# cd llama.cpp && make && cd ..

# - name: Make directory
# id: make_directory
# run: |
# mkdir -p models/llama

# - name: Copy mac
# id: copy_mac
# run: |
# cp llama.cpp/main models/llama

# - name: Make
# if: startsWith(github.ref, 'refs/tags/')
# run: yarn make
# env:
# APPLE_ID: ${{ secrets.APPLE_ID }}
# APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}

# - name: Release
# uses: softprops/action-gh-release@v1
# if: startsWith(github.ref, 'refs/tags/')
# env:
# GITHUB_TOKEN: ${{ secrets.AUTH_TOKEN }}
# with:
# files: |
# **/*.dmg
# **/*.zip
# !**/*.txt.zip

# build-windows:
# name: Build Windows
# if: startsWith(github.ref, 'refs/tags/')
# runs-on: windows-latest
# strategy:
# matrix:
# os: [windows-latest]
# steps:
# - name: Checkout code with submodules
# uses: actions/checkout@v2
# with:
# submodules: recursive

# - name: Use node 18.x
# uses: actions/setup-node@v1
# with:
# node-version: "18.x"

# - name: Yarn install
# run: yarn install --network-timeout 100000

# - name: Add Windows certificate
# if: matrix.os == 'windows-latest' && startsWith(github.ref, 'refs/tags/')
# id: write_file
# uses: timheuer/base64-to-file@v1
# with:
# fileName: "win-certificate.pfx"
# encodedString: ${{ secrets.CERTIFICATE_WINDOWS_PFX }}

# - name: Build
# id: make_build
# run: |
# cd llama.cpp && mkdir build && cd build && cmake .. && cmake --build . --config Release && cd ../..

# - name: Make directory
# id: make_directory
# run: |
# mkdir -p models/llama

# - name: Copy windows
# id: copy_windows
# run: |
# cp llama.cpp/build/bin/Release/main.exe models/llama

# - name: Make
# if: startsWith(github.ref, 'refs/tags/')
# run: yarn make
# env:
# WINDOWS_PFX_FILE: ${{ steps.write_file.outputs.filePath }}
# WINDOWS_PFX_PASSWORD: ${{ secrets.WINDOWS_PFX_PASSWORD }}

# - name: Release
# uses: softprops/action-gh-release@v1
# if: startsWith(github.ref, 'refs/tags/')
# env:
# GITHUB_TOKEN: ${{ secrets.AUTH_TOKEN }}
# with:
# files: |
# **/*Setup.exe
# **/*.nupkg
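Taken together, the workflow changes above switch the Apple Silicon job to a Metal-enabled llama.cpp build (`LLAMA_METAL=1 make`) while the Windows job keeps the CMake path. As a rough local sketch of those build-and-stage steps (commands copied from the workflow; available llama.cpp build flags and output paths may differ by revision):

```sh
# Sketch of the workflow's build-and-stage steps, run locally.
# (Commands are taken from the workflow above; exact flags and output
# paths depend on the llama.cpp revision checked out as a submodule.)

# macOS on Apple Silicon: build llama.cpp with Metal acceleration.
cd llama.cpp && LLAMA_METAL=1 make && cd ..

# Intel macOS: plain CPU build.
#   cd llama.cpp && make && cd ..

# Windows (CMake + MSVC): out-of-tree Release build.
#   cd llama.cpp && mkdir build && cd build && cmake .. && cmake --build . --config Release && cd ../..

# Stage the resulting binary where the app expects it.
mkdir -p models/llama
cp llama.cpp/main models/llama                            # macOS
#   cp llama.cpp/build/bin/Release/main.exe models/llama  # Windows
```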
2 changes: 1 addition & 1 deletion .gitignore
@@ -23,7 +23,7 @@ ggml-model-q4_0.bin
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
models

main
# Coverage directory used by tools like istanbul
coverage
*.lcov
2 changes: 1 addition & 1 deletion .gitmodules
@@ -1,3 +1,3 @@
[submodule "llama.cpp"]
path = llama.cpp
url = https://github.com/ggerganov/llama.cpp.git
url = https://github.com/ggerganov/llama.cpp.git
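The `.gitmodules` entry above pins llama.cpp as a git submodule, which is why every CI job checks out with `submodules: recursive`. A minimal local equivalent, assuming the clone URL from the README:

```sh
# Clone with the llama.cpp submodule in one step...
git clone --recursive https://github.com/ohmplatform/FreedomGPT.git freedom-gpt
cd freedom-gpt

# ...or initialize the submodule in an existing clone.
git submodule update --init --recursive
```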
17 changes: 3 additions & 14 deletions Readme.md
@@ -26,15 +26,6 @@ allows users to run alpaca models on their local machine.
- [Yarn](https://classic.yarnpkg.com/en/docs/install/#windows-stable)
- [Git](https://git-scm.com/downloads)

# If you want to run the project

```sh
git clone --recursive https://github.com/ohmplatform/FreedomGPT.git freedom-gpt
cd freedom-gpt
yarn install
yarn start:prod
```

# If you want to contribute to the project

## Working with the repository
@@ -101,9 +92,9 @@ OR
yarn docker
```

## Working Video
<!-- ## Working Video
https://user-images.githubusercontent.com/54356944/233825525-d95accf3-a26b-4f37-8fc1-6e922f782a66.mov
https://user-images.githubusercontent.com/54356944/233825525-d95accf3-a26b-4f37-8fc1-6e922f782a66.mov -->

# Credits

@@ -113,9 +104,7 @@ This project utilizes several open-source packages and libraries, without which

"LLAMA" by Facebook Research - a low-latency, large-scale approximate nearest neighbor search algorithm. https://github.com/facebookresearch/llama

"Alpaca" by Stanford CRFM - a framework for understanding and improving the efficiency and robustness of algorithms. https://crfm.stanford.edu/2023/03/13/alpaca.html

"alpaca-lora" by tloen - a Python library for working with LoRa radios and the Alpaca protocol. https://github.com/tloen/alpaca-lora
"Chatbot UI" - https://github.com/mckaywrigley/chatbot-ui

We would like to express our gratitude to the developers of these packages and their contributors for making their work available to the public under open source licenses. Their contributions have enabled us to build a more robust and efficient project.

7 changes: 7 additions & 0 deletions ambient.d.ts
@@ -0,0 +1,7 @@
declare module "electron-next" {
const value: (
directories: string | { development: string; production: string },
port?: number
) => void;
export default value;
}
1 change: 0 additions & 1 deletion docker-app/.dockerignore

This file was deleted.

44 changes: 0 additions & 44 deletions docker-app/dockerfile

This file was deleted.

13 changes: 0 additions & 13 deletions docker-app/index.html

This file was deleted.

Binary file removed docker-app/logo.png