Commit
Showing 16 changed files with 252 additions and 94 deletions.
@@ -0,0 +1,63 @@
# Lumo Example

Lumo-70B-Instruct is a 70-billion-parameter model for the Solana ecosystem. Built on Meta's LLaMa 3.3 70B Instruct and trained on an extensive Solana documentation dataset, it is designed for developer assistance in the blockchain space.
- [Docs](https://huggingface.co/lumolabs-ai/Lumo-70B-Instruct)
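The example below loads the model with 4-bit (NF4) quantization via `BitsAndBytesConfig`. As a rough, illustrative estimate (an assumption, not a figure from the model card), the quantized weights alone take on the order of 35–40 GiB of memory; `device_map="auto"` spreads the shards across available GPUs and, with CPU offload enabled as in the snippet, can place the remainder on the CPU.

```python
# Back-of-the-envelope weight-memory estimate for 4-bit quantization.
# The 20% overhead factor is an assumption covering quantization scales
# and buffers; real usage also depends on the KV cache and activations.
params = 70e9          # parameter count of Lumo-70B-Instruct
bits_per_param = 4     # NF4 quantization
overhead = 1.2         # assumed overhead factor
weight_gib = params * bits_per_param / 8 * overhead / 2**30
print(f"~{weight_gib:.0f} GiB for quantized weights")  # roughly 39 GiB
```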
```python
import torch
from transformers import AutoTokenizer, BitsAndBytesConfig, LlamaForCausalLM

from swarms import Agent


class Lumo:
    """
    A class for generating text using the Lumo model with 4-bit quantization.
    """

    def __init__(self):
        """
        Initializes the Lumo model with 4-bit quantization and its tokenizer.
        """
        # Configure 4-bit (NF4) quantization with fp32 CPU offload enabled
        bnb_config = BitsAndBytesConfig(
            load_in_4bit=True,
            bnb_4bit_quant_type="nf4",
            bnb_4bit_compute_dtype=torch.float16,
            llm_int8_enable_fp32_cpu_offload=True,
        )

        self.model = LlamaForCausalLM.from_pretrained(
            "lumolabs-ai/Lumo-70B-Instruct",
            device_map="auto",
            quantization_config=bnb_config,
            use_cache=False,
            attn_implementation="sdpa",
        )
        self.tokenizer = AutoTokenizer.from_pretrained(
            "lumolabs-ai/Lumo-70B-Instruct"
        )

    def run(self, task: str) -> str:
        """
        Generates text for the given task using the Lumo model.

        Args:
            task (str): The input prompt for the model.

        Returns:
            str: The generated text.
        """
        inputs = self.tokenizer(task, return_tensors="pt").to(self.model.device)
        outputs = self.model.generate(**inputs, max_new_tokens=100)
        return self.tokenizer.decode(outputs[0], skip_special_tokens=True)


Agent(
    agent_name="Solana-Analysis-Agent",
    model_name=Lumo(),  # pass the custom wrapper as the agent's model
    max_loops="auto",
    interactive=True,
    streaming_on=True,
).run("How do I create a smart contract on Solana?")
```
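If you only need raw completions, the `Lumo` wrapper can also be called directly rather than through an `Agent`. A minimal sketch (the prompt is illustrative):

```python
# Direct use of the Lumo wrapper defined above (assumes the weights have
# already been downloaded and fit in the available GPU/CPU memory).
lumo = Lumo()
print(lumo.run("Explain what a program-derived address (PDA) is on Solana."))
```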
@@ -0,0 +1,59 @@
import torch
from transformers import (
    AutoTokenizer,
    BitsAndBytesConfig,
    LlamaForCausalLM,
)

from swarms import Agent


class Lumo:
    """
    A class for generating text using the Lumo model with 4-bit quantization.
    """

    def __init__(self):
        """
        Initializes the Lumo model with 4-bit quantization and its tokenizer.
        """
        # Configure 4-bit (NF4) quantization with fp32 CPU offload enabled
        bnb_config = BitsAndBytesConfig(
            load_in_4bit=True,
            bnb_4bit_quant_type="nf4",
            bnb_4bit_compute_dtype=torch.float16,
            llm_int8_enable_fp32_cpu_offload=True,
        )

        self.model = LlamaForCausalLM.from_pretrained(
            "lumolabs-ai/Lumo-70B-Instruct",
            device_map="auto",
            quantization_config=bnb_config,
            use_cache=False,
            attn_implementation="sdpa",
        )
        self.tokenizer = AutoTokenizer.from_pretrained(
            "lumolabs-ai/Lumo-70B-Instruct"
        )

    def run(self, task: str) -> str:
        """
        Generates text for the given task using the Lumo model.

        Args:
            task (str): The input prompt for the model.

        Returns:
            str: The generated text.
        """
        inputs = self.tokenizer(task, return_tensors="pt").to(self.model.device)
        outputs = self.model.generate(**inputs, max_new_tokens=100)
        return self.tokenizer.decode(outputs[0], skip_special_tokens=True)


Agent(
    agent_name="Solana-Analysis-Agent",
    model_name=Lumo(),  # pass the custom wrapper as the agent's model
    max_loops="auto",
    interactive=True,
    streaming_on=True,
).run("How do I create a smart contract on Solana?")
@@ -5,7 +5,7 @@ build-backend = "poetry.core.masonry.api"
 [tool.poetry]
 name = "swarms"
-version = "6.9.7"
+version = "6.9.8"
 description = "Swarms - TGSC"
 license = "MIT"
 authors = ["Kye Gomez <[email protected]>"]
@@ -62,7 +62,6 @@ python = ">=3.10,<4.0"
 asyncio = ">=3.4.3,<4.0"
 toml = "*"
 pypdf = "5.1.0"
-swarm-models = "*"
 loguru = "*"
 pydantic = "*"
 tenacity = "*"
@@ -76,7 +75,6 @@ aiofiles = "*"
 clusterops = "*"
 # chromadb = "*"
 rich = "*"
-pandas = "*"
 # sentence-transformers = "*"
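The version bump and the dropped `swarm-models` and `pandas` dependencies ship in `6.9.8`. After upgrading, a quick sanity check using only the standard library (assuming the package is installed from PyPI under the distribution name `swarms`):

```python
# Confirm the installed swarms release includes this commit's version bump.
from importlib.metadata import version

print(version("swarms"))  # expected: 6.9.8
```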