Skip to content

Commit

Permalink
Merge branch 'multiple-models'
Browse files Browse the repository at this point in the history
  • Loading branch information
bhattaraijay05 committed May 5, 2023
2 parents 168436e + a70b604 commit 7f5d34a
Show file tree
Hide file tree
Showing 26 changed files with 1,941 additions and 499 deletions.
105 changes: 86 additions & 19 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,33 +7,55 @@ on:

jobs:
build-macArm:
name: Build (macos-latest)
name: Build Arm Mac
if: startsWith(github.ref, 'refs/tags/')
runs-on: macos-latest
runs-on: self-hosted
strategy:
matrix:
os: [macos-latest]
steps:
- name: Checkout
uses: actions/checkout@v1
- name: Checkout code with submodules
uses: actions/checkout@v2
with:
submodules: recursive

- name: Use node 18.x
uses: actions/setup-node@v1
with:
node-version: "18.x"

- name: Yarn install
run: yarn install
- name: Add MacOS certs
if: matrix.os == 'macos-latest' && startsWith(github.ref, 'refs/tags/')
run: chmod +x add-osx-cert.sh && ./add-osx-cert.sh
env:
CERTIFICATE_OSX_APPLICATION: ${{ secrets.CERTIFICATE_OSX_APPLICATION }}
CERTIFICATE_PASSWORD: ${{ secrets.CERTIFICATE_PASSWORD }}

# - name: Add MacOS certs
# if: matrix.os == 'macos-latest' && startsWith(github.ref, 'refs/tags/')
# run: chmod +x add-osx-cert.sh && ./add-osx-cert.sh
# env:
# CERTIFICATE_OSX_APPLICATION: ${{ secrets.CERTIFICATE_OSX_APPLICATION }}
# CERTIFICATE_PASSWORD: ${{ secrets.CERTIFICATE_PASSWORD }}

- name: Build
id: make_build
run: |
cd llama.cpp && make && cd ..
- name: Make directory
id: make_directory
run: |
mkdir -p models/llama
- name: Copy mac
id: copy_mac
run: |
cp llama.cpp/main models/llama
- name: Make
if: startsWith(github.ref, 'refs/tags/')
run: yarn make-macArm
run: yarn make
env:
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}

- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
Expand All @@ -45,33 +67,55 @@ jobs:
**/*.zip
build-macIntel:
name: Build (macos-latest)
name: Build Intel Mac
if: startsWith(github.ref, 'refs/tags/')
runs-on: macos-latest
strategy:
matrix:
os: [macos-latest]
steps:
- name: Checkout
uses: actions/checkout@v1
- name: Checkout code with submodules
uses: actions/checkout@v2
with:
submodules: recursive

- name: Use node 18.x
uses: actions/setup-node@v1
with:
node-version: "18.x"

- name: Yarn install
run: yarn install

- name: Add MacOS certs
if: matrix.os == 'macos-latest' && startsWith(github.ref, 'refs/tags/')
run: chmod +x add-osx-cert.sh && ./add-osx-cert.sh
env:
CERTIFICATE_OSX_APPLICATION: ${{ secrets.CERTIFICATE_OSX_APPLICATION }}
CERTIFICATE_PASSWORD: ${{ secrets.CERTIFICATE_PASSWORD }}

- name: Build
id: make_build
run: |
cd llama.cpp && make && cd ..
- name: Make directory
id: make_directory
run: |
mkdir -p models/llama
- name: Copy mac
id: copy_mac
run: |
cp llama.cpp/main models/llama
- name: Make
if: startsWith(github.ref, 'refs/tags/')
run: yarn make-macIntel
run: yarn make
env:
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}

- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
Expand All @@ -81,36 +125,59 @@ jobs:
files: |
**/*.dmg
**/*.zip
!**/*.txt.zip
build-windows:
name: Build (windows-latest)
name: Build Windows
if: startsWith(github.ref, 'refs/tags/')
runs-on: windows-latest
strategy:
matrix:
os: [windows-latest]
steps:
- name: Checkout
uses: actions/checkout@v1
- name: Checkout code with submodules
uses: actions/checkout@v2
with:
submodules: recursive

- name: Use node 18.x
uses: actions/setup-node@v1
with:
node-version: "18.x"

- name: Yarn install
run: yarn install

- name: Add Windows certificate
if: matrix.os == 'windows-latest' && startsWith(github.ref, 'refs/tags/')
id: write_file
uses: timheuer/base64-to-file@v1
with:
fileName: "win-certificate.pfx"
encodedString: ${{ secrets.CERTIFICATE_WINDOWS_PFX }}

- name: Build
id: make_build
run: |
cd llama.cpp && mkdir build && cd build && cmake .. && cmake --build . --config Release && cd ../..
- name: Make directory
id: make_directory
run: |
mkdir -p models/llama
- name: Copy windows
id: copy_windows
run: |
cp llama.cpp/build/bin/Release/main.exe models/llama
- name: Make
if: startsWith(github.ref, 'refs/tags/')
run: yarn make-win
run: yarn make
env:
WINDOWS_PFX_FILE: ${{ steps.write_file.outputs.filePath }}
WINDOWS_PFX_PASSWORD: ${{ secrets.WINDOWS_PFX_PASSWORD }}

- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
Expand Down
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,10 @@ pids
.env
devconst.ts
keys

ggml-model-q4_0.bin
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
models

# Coverage directory used by tools like istanbul
coverage
Expand Down
6 changes: 3 additions & 3 deletions .gitmodules
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
[submodule "alpaca.cpp"]
path = alpaca.cpp
url = https://github.com/antimatter15/alpaca.cpp.git
[submodule "llama.cpp"]
path = llama.cpp
url = https://github.com/ggerganov/llama.cpp.git
25 changes: 7 additions & 18 deletions Readme.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,15 +26,6 @@ allows users to run alpaca models on their local machine.
- [Yarn](https://classic.yarnpkg.com/en/docs/install/#windows-stable)
- [Git](https://git-scm.com/downloads)

# If you want to run the application directly (Mac and Windows only)

```sh
git clone https://github.com/ohmplatform/FreedomGPT.git freedom-gpt
cd freedom-gpt
yarn install
yarn start:prod
```

# If you want to contribute to the project

## Working with the repository
Expand All @@ -45,13 +36,13 @@ cd freedom-gpt
yarn install
```

# Building the alpaca.cpp library
# Building the llama.cpp library

## Building from Source (MacOS/Linux)

```sh
cd alpaca.cpp
make chat
cd llama.cpp
make
```

## Building from Source (Windows)
Expand All @@ -60,12 +51,12 @@ make chat
- Run the following commands one by one:

```ps1
cd alpaca.cpp
cd llama.cpp
cmake .
cmake --build . --config Release
```

- You should now have a `Release` folder with a `chat.exe` file inside it. You can run this file to test the chat client.
- You should now have a `Release` folder with a `main.exe` file inside it. You can run this file to test the chat client.

## Changing the API URL

Expand Down Expand Up @@ -103,22 +94,20 @@ yarn docker

## Working Video

https://user-images.githubusercontent.com/54356944/231952507-94ef7335-4238-43ee-8c45-677f6cd48988.mov
https://user-images.githubusercontent.com/54356944/233825525-d95accf3-a26b-4f37-8fc1-6e922f782a66.mov

# Credits

This project utilizes several open-source packages and libraries, without which this project would not have been possible:

"alpaca.cpp" by antimatter15 - a C++ library for Alpaca API. https://github.com/antimatter15/alpaca.cpp
"llama.cpp" by Georgi Gerganov - a plain C/C++ implementation for running LLaMA-family model inference on local hardware. https://github.com/ggerganov/llama.cpp

"LLaMA" by Meta AI (Facebook Research) - a family of open foundation large language models. https://github.com/facebookresearch/llama

"Alpaca" by Stanford CRFM - an instruction-following language model fine-tuned from LLaMA. https://crfm.stanford.edu/2023/03/13/alpaca.html

"alpaca-lora" by tloen - code for instruction-tuning LLaMA on consumer hardware using low-rank adaptation (LoRA). https://github.com/tloen/alpaca-lora

"Alpaca-native-4bit-ggml" by Sosaka - 4-bit quantized Alpaca model weights in ggml format, hosted on Hugging Face. https://huggingface.co/Sosaka/Alpaca-native-4bit-ggml/tree/main

We would like to express our gratitude to the developers of these packages and their contributors for making their work available to the public under open source licenses. Their contributions have enabled us to build a more robust and efficient project.

# LICENSE
Expand Down
1 change: 0 additions & 1 deletion alpaca.cpp
Submodule alpaca.cpp deleted from a0c74a
1 change: 1 addition & 0 deletions llama.cpp
Submodule llama.cpp added at 0e018f
6 changes: 6 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,14 @@
"typescript": "~4.5.4"
},
"dependencies": {
"@electron/remote": "^2.0.9",
"@emotion/react": "^11.10.8",
"@emotion/styled": "^11.10.8",
"@mui/material": "^5.12.2",
"axios": "^1.3.4",
"check-disk-space": "^3.3.1",
"cors": "^2.8.5",
"electron-dl": "^3.5.0",
"electron-squirrel-startup": "^1.0.0",
"express": "^4.18.2",
"react": "^18.2.0",
Expand Down
24 changes: 21 additions & 3 deletions src/app.tsx
Original file line number Diff line number Diff line change
@@ -1,14 +1,32 @@
import { ThemeProvider, createTheme } from "@mui/material/styles";
import React from "react";
import ReactDOM from "react-dom/client";
import { io } from "socket.io-client";
import App from "./app/App";
import DownloadProgressProvider from "./app/context/DownloadContext";
import MessageFetchProvider from "./app/context/MessageFetch";
import ModelProvider from "./app/context/ModelSelection";

const socket = io("http://localhost:8889");

const darkTheme = createTheme({
palette: {
mode: "dark",
},
});

function render() {
ReactDOM.createRoot(document.getElementById("root") as HTMLElement).render(
<React.StrictMode>
<MessageFetchProvider>
<App />
</MessageFetchProvider>
<ModelProvider socket={socket}>
<DownloadProgressProvider>
<MessageFetchProvider>
<ThemeProvider theme={darkTheme}>
<App socket={socket} />
</ThemeProvider>
</MessageFetchProvider>
</DownloadProgressProvider>
</ModelProvider>
</React.StrictMode>
);
}
Expand Down
Loading

0 comments on commit 7f5d34a

Please sign in to comment.