Commit 0f6433b
First commit. CPU working. GPU not tested.
murrellb committed Nov 13, 2024
1 parent 7439500 commit 0f6433b
Showing 6 changed files with 635 additions and 1 deletion.
7 changes: 7 additions & 0 deletions Project.toml
@@ -3,6 +3,13 @@ uuid = "1285d783-1a6d-4703-8f05-8ac83ef55592"
authors = ["murrellb <[email protected]> and contributors"]
version = "1.0.0-DEV"

[deps]
BytePairEncoding = "a4280ba5-8788-555a-8ca8-4a8c3d966a71"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
SafeTensors = "eeda0dda-7046-4914-a807-2495fc7abb89"

[compat]
julia = "1.9"

10 changes: 10 additions & 0 deletions README.md
@@ -4,3 +4,13 @@
[![Dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://MurrellGroup.github.io/Jjama3.jl/dev/)
[![Build Status](https://github.com/MurrellGroup/Jjama3.jl/actions/workflows/CI.yml/badge.svg?branch=main)](https://github.com/MurrellGroup/Jjama3.jl/actions/workflows/CI.yml?query=branch%3Amain)
[![Coverage](https://codecov.io/gh/MurrellGroup/Jjama3.jl/branch/main/graph/badge.svg)](https://codecov.io/gh/MurrellGroup/Jjama3.jl)

# Quickstart

```julia
using Jjama3, JSON3  # JSON3 is needed to parse the model config

# Paths assume the Llama 3.2 1B Instruct config and weights are in a local directory.
config = JSON3.read(read("Llama3_2_1B_instruct/config.json", String));
model = load_llama3_from_safetensors("Llama3_2_1B_instruct/model.safetensors", config);
tkn = llama3_tokenizer();
prompt = assistant_prompt("Why would anyone implement the llama3 LLM in Julia?", tkn);
ts = generate(model, prompt, max_new_tokens=500, encoder_for_printing=tkn);
```
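
For repeated queries, the quickstart calls can be wrapped in small helpers. This is only a sketch: the `load_model` and `ask` helpers and the local `Llama3_2_1B_instruct` directory are illustrative, not part of the package.

```julia
using Jjama3, JSON3

# Hypothetical helper: load the config and weights from a local directory once.
function load_model(dir::AbstractString)
    config = JSON3.read(read(joinpath(dir, "config.json"), String))
    return load_llama3_from_safetensors(joinpath(dir, "model.safetensors"), config)
end

# Hypothetical helper: format a question as an assistant prompt and generate a reply.
function ask(model, question::AbstractString; max_new_tokens=500)
    tkn = llama3_tokenizer()
    prompt = assistant_prompt(question, tkn)
    return generate(model, prompt; max_new_tokens=max_new_tokens, encoder_for_printing=tkn)
end

model = load_model("Llama3_2_1B_instruct")
ask(model, "Why would anyone implement the llama3 LLM in Julia?")
```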
8 changes: 7 additions & 1 deletion src/Jjama3.jl
@@ -1,5 +1,11 @@
module Jjama3

# Write your package code here.
using Flux, BytePairEncoding, SafeTensors, Distributions, LinearAlgebra

include("model.jl")
include("utils.jl")
include("sampling.jl")

export load_llama321B_from_safetensors, load_llama3_from_safetensors, llama3_tokenizer, assistant_prompt, format_llama32_instruction_prompt, generate, forward_loss, forward_inference

end
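
A minimal smoke test over the exported names, as a sketch (this test file is hypothetical and not part of this commit):

```julia
using Test, Jjama3

@testset "Jjama3 exports" begin
    # Each exported symbol should resolve to a definition inside the module.
    for name in (:load_llama3_from_safetensors, :llama3_tokenizer,
                 :assistant_prompt, :generate, :forward_loss, :forward_inference)
        @test isdefined(Jjama3, name)
    end
end
```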