Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
opML, opp/AI, IMO
Read more about our work
How can the ORA network be used?




An AI Oracle powered by Optimistic Machine Learning (opML)





ORA provides chain-agnostic infrastructure that seamlessly connects AI and blockchain.


requestId












// language model
"deepseek-ai/DeepSeek-V3" = 0.15, # Per 1M Tokens
"deepseek-ai/DeepSeek-R1" = 1.35, # Per 1M Tokens
"meta-llama/Llama-3.3-70B-Instruct" = 0.68, # Per 1M Tokens
"meta-llama/Llama-3.2-3B-Instruct" = 0.05, # Per 1M Tokens
"meta-llama/Llama-2-13b-chat-hf" = 0.17, # Per 1M Tokens
"meta-llama/Llama-2-7b-chat-hf" = 0.15, # Per 1M Tokens
"meta-llama/Llama-3.1-405B-Instruct" = 2.69, # Per 1M Tokens
"meta-llama/Llama-3.2-1B-Instruct" = 0.05, # Per 1M Tokens
"meta-llama/Meta-Llama-3-8B-Instruct" = 0.14, # Per 1M Tokens
"google/gemma-2b-it" = 0.08, # Per 1M Tokens
"google/gemma-2-27b-it" = 0.62, # Per 1M Tokens
"google/gemma-2-9b-it" = 0.23, # Per 1M Tokens
"mistralai/Mistral-7B-Instruct-v0.3" = 0.15, # Per 1M Tokens
"mistralai/Mixtral-8x22B-Instruct-v0.1" = 0.92, # Per 1M Tokens
"mistralai/Mistral-7B-Instruct-v0.2" = 0.15, # Per 1M Tokens
"mistralai/Mixtral-8x7B-Instruct-v0.1" = 0.46, # Per 1M Tokens
"mistralai/Mistral-7B-Instruct-v0.1" = 0.15, # Per 1M Tokens
"Qwen/QwQ-32B-Preview" = 0.92, # Per 1M Tokens
"Qwen/Qwen2.5-Coder-32B-Instruct" = 0.62, # Per 1M Tokens
"Qwen/Qwen2.5-72B-Instruct" = 0.92, # Per 1M Tokens
"Qwen/Qwen2-72B-Instruct" = 0.96, # Per 1M Tokens
// image generation model
"black-forest-labs/FLUX.1-dev" = 0.020, # Per 1M Pixels @ 28 Steps
"black-forest-labs/FLUX.1-canny" = 0.020, # Per 1M Pixels @ 28 Steps
"black-forest-labs/FLUX.1-redux-dev" = 0.020, # Per 1M Pixels @ 28 Steps
"black-forest-labs/FLUX.1-schnell" = 0.006, # Per 1M Pixels @ 4 Steps
"stabilityai/stable-diffusion-3.5-large" = 0.05, # Per Image
"stabilityai/stable-diffusion-3.5-large-turbo" = 0.03, # Per Image
"stabilityai/stable-diffusion-3-medium" = 0.03, # Per Image
"stabilityai/stable-diffusion-3.5-medium" = 0.03, # Per Image
// video generation model
"KumoAnonymous/KumoVideo-Turbo" = 1, # Per Video
opML and opp/AI
forge snapshot --gas-report


// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.13;
import {Test, console2, Vm} from "forge-std/Test.sol";
import {Prompt} from "../src/Prompt.sol";
import {IAIOracle} from "OAO/contracts/interfaces/IAIOracle.sol";
import {OraSepoliaAddresses} from "./OraSepoliaAddresses.t.sol";
import "forge-std/console.sol";
contract EstimateGasLimitTest is Test, OraSepoliaAddresses {
Prompt prompt;
string rpc;
uint256 forkId;
uint256 modelId;
string result;
function setUp() public {
rpc = vm.envString("RPC_URL");
forkId = vm.createSelectFork(rpc);
prompt = new Prompt(IAIOracle(OAO_PROXY));
modelId = 50;
result = "Qmd3xWJVRao8AfgRTuXLCWBYj6VdVV8HFuKygk9FLTW5bi";
}
function test_estimateGasLimit() public {
uint256 requestId = prompt.calculateAIResult{value: prompt.estimateFee(modelId)}(modelId, "test generation");
uint256 before = gasleft();
vm.prank(OAO_PROXY);
prompt.aiOracleCallback(requestId, bytes(result), "");
uint256 afterCall = gasleft();
console.log(before - afterCall);
}
}modelId = 11;
result = "Beneath the sky, where whispers blend, A story stirs that has no end. The trees bow low, the rivers hum, A timeless tune, where life has sprung. The sun ascends with golden rays, To bless the fields and mark the days. The flowers bloom, their colors speak, Of beauty vast, profound, unique. The stars alight, a cosmic sea, Of dreams untold and destiny. Each twinkle holds a secret dear, A tale of love, of hope, of fear. Through valleys deep and mountains wide, The journey calls, the hearts true guide. With every step, a lesson found, In fleeting moments, wisdoms ground. Yet in the shadows, doubts may creep, A restless thought, a troubled sleep. But courage whispers, soft and clear, Youre not alone; Im always near. So carry forth, embrace the storm, Through fire and frost, the heart is warm. For lifes a dance, both wild and free, A fleeting song of harmony. As time unfurls its endless thread, The soul will soar where dreams are led. A symphony of lifes embrace, endless journey through boundless space. Beneath the moon, the rivers gleam, A quiet whisper shapes a dream. Through winds of change and skies of blue, I love you.";ORA Privacy Policy
Allocated $ORA for IMO Token for User
=
(Total Raising Target $ORA / Total Actual Raised $ORA)
*
Deposited $ORA of UserIntroduction
Supported AI Models and Deployment Addresses of ORA's AI Oracle.
Quick Start guides to the CLI

cd tora-docker-compose
mv .env.example .env && vim .env

MAINNET_WSS=""
MAINNET_HTTP=""
SEPOLIA_WSS=""
SEPOLIA_HTTP=""

CONFIRM_CHAINS="mainnet"
CONFIRM_CHAINS='["mainnet"]'
CONFIRM_CHAINS='["mainnet","sepolia"]'
PRIV_KEY="0x..."
CONFIRM_MODELS='[13]'
CONFIRM_USE_CROSSCHECK=true
CONFIRM_CC_POLLING_INTERVAL=3000 # 3 sec in ms
BATCH_BLOCKS_MAX=600 # default 600 means blocks in 2 hours on eth
CONFIRM_TASK_TTL=7200000
CONFIRM_CC_TTL=7200000 # 2 hours in ms
TORA_ENV=production
REDIS_TTL=86400000 # 1 day in ms

# ./tora-docker-compose
docker compose up

docker container logs ora-tora

2024-08-22 23:05:46 ora-openlm | Loading model into the right classes...
2024-08-22 23:05:46 ora-openlm | Loading checkpoint from disk...
2024-08-22 23:05:46 ora-openlm | * Serving Flask app 'olm_server'
2024-08-22 23:05:46 ora-openlm | * Debug mode: off
2024-08-22 23:05:46 ora-openlm | WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead.
2024-08-22 23:05:46 ora-openlm | * Running on all addresses (0.0.0.0)
2024-08-22 23:05:46 ora-openlm | * Running on <http://127.0.0.1:5000>
2024-08-22 23:05:46 ora-openlm | * Running on <http://172.19.0.2:5000>
2024-08-22 23:05:46 ora-openlm | Press CTRL+C to quit
2024-08-22 23:05:52 ora-openlm | 172.19.0.4 - - [22/Aug/2024 15:05:52] "GET /hash HTTP/1.1" 200 -
2024-08-22 23:05:52 ora-tora | 3:05:52 PM [confirm] [+] model servers [id=13] is up.
2024-08-22 23:05:52 ora-tora | 3:05:52 PM [confirm] listening on provider.network: sepolia
2024-08-22 23:05:52 ora-tora | 3:05:52 PM [confirm] [+] RPC Server running on port 5001

3:11:00 PM [confirm] receive event in tx: 0x3b6f68368620d26af54ad3104db41eca4b38a593c680546a1cb3f62eed0699f6

5:23:59 PM [confirm] confirm at txhash: 0x940a59e3dbeb232ceeea4083d0175f0eefe629def3931eee7c5356d97f608525

not support model 11

function calculateAIResult(uint256 model1Id, uint256 model2Id, string calldata model1Prompt) payable external returns (uint256) {
bytes memory input = bytes(model1Prompt);
uint256 model1Fee = estimateFee(model1Id);
uint256 requestId = aiOracle.requestCallback{value: model1Fee}(
model1Id, input, address(this), callbackGasLimit[model1Id], abi.encode(model2Id)
);
AIOracleRequest storage request = requests[requestId];
request.input = input;
request.sender = msg.sender;
request.modelId = model1Id;
emit promptRequest(requestId, msg.sender, model1Id, model1Prompt);
return requestId;
}function aiOracleCallback(uint256 requestId, bytes calldata output, bytes calldata callbackData) external payable override onlyAIOracleCallback() {
AIOracleRequest storage request = requests[requestId];
require(request.sender != address(0), "request does not exist");
request.output = output;
prompts[request.modelId][string(request.input)] = string(output);
//if callbackData is not empty decode it and call another inference
if(callbackData.length != 0){
(uint256 model2Id) = abi.decode(callbackData, (uint256));
uint256 model2Fee = estimateFee(model2Id);
(bool success, bytes memory data) = address(aiOracle).call{value: model2Fee}(abi.encodeWithSignature("requestCallback(uint256,bytes,address,uint64,bytes)", model2Id, output, address(this), callbackGasLimit[model2Id], ""));
require(success, "failed to call nested inference");
(uint256 rid) = abi.decode(data, (uint256));
AIOracleRequest storage recursiveRequest = requests[rid];
recursiveRequest.input = output;
recursiveRequest.sender = msg.sender;
recursiveRequest.modelId = model2Id;
emit promptRequest(rid, msg.sender, model2Id, "");
}
emit promptsUpdated(requestId, request.modelId, string(request.input), string(output), callbackData);
}PromptNestedInference prompt = new PromptNestedInference(IAIOracle(OAO_PROXY));
uint256 stableDiffusionFee = prompt.estimateFee(STABLE_DIFFUSION_ID);
uint256 llamaFee = prompt.estimateFee(LLAMA_ID);
uint256 requestId = prompt.calculateAIResult{value: ((stableDiffusionFee + llamaFee)*11/10)}(STABLE_DIFFUSION_ID, LLAMA_ID, SD_PROMPT);function calculateAIResult(uint256 modelId, string calldata prompt, uint256 batchSize) payable external {
bytes memory input = bytes(prompt);
bytes memory callbackData = bytes("");
address callbackAddress = address(this);
uint256 requestId = aiOracle.requestBatchInference{value: msg.value}(
batchSize, modelId, input, callbackAddress, callbackGasLimit[modelId], callbackData, IAIOracle.DA(0), IAIOracle.DA(0)
);
}//Prompt.sol
function estimateFeeBatch(uint256 modelId) public view returns (uint256) {
return aiOracle.estimateFee(modelId, callbackGasLimit[modelId]);
}//AIOracle.sol
function estimateFeeBatch(uint256 modelId, uint256 gasLimit, uint256 batchSize) public view ifModelExists(modelId) returns (uint256) {
ModelData storage model = models[modelId];
return batchSize * model.fee + gasPrice * gasLimit;
}[
// Batch inference requests with required "prompt" and optional "seed" or "seed_range".
{
"prompt": "prompt1", // Mandatory prompt
"seed": 1 // Optional seed for response variation
},
{
"prompt": "prompt2",
"seed": 2
},
{
"prompt": "prompt3" // No seed provided, default behavior
},
{
"prompt": "prompt4",
"seed_range": [1, 2] // Seed range for controlled variation
}
]result1.result2.result3...[{"prompt":"generate image of an elephant","seed":1},{"prompt":"generate image of an elephant","seed":2}].Qma3gR6z9dtSQ3fsoSUwQtHnNMYZEHhuJx3DBqx677HPaj.QmemPcnzdyigx1XnAvskVKmEhnsivBuE3Qkp3jJqfdWWxCimport { Web3 } from "web3";
const web3 = new Web3(process.env.RPC_URL);
const wallet = web3.eth.accounts.wallet.add(process.env.PRIVATE_KEY); // Make sure you have funds
const contract = new web3.eth.Contract(batchInference_abi, batchInference_address);
const prompt = `[{"prompt":"what's the best fruit", "seed":1},{"prompt":"what's the best fruit", "seed":2},{"prompt":"kiwi"},{"prompt":"dog", "seed_range":[1,2]}]`
const fee = Number(await contract.methods.estimateFeeBatch(11, 5).call());
const totalFee = (fee*11/10)
const tx = await contract.methods.requestBatchInference(11, prompt, 5).send({from: wallet[0].address, value: totalFee});
console.log("Tx: ", tx)
setTimeout(async () => {
const result = await contract.methods.prompts(11, prompt).call();
console.log("Result: ", result)
}, 30000);// Example 1: Batch with identical prompts but different seeds for varied responses
const prompt1 = `[{"prompt":"hello","seed":1},{"prompt":"hello","seed":2}]`
// Example 2: Batch with different prompts, no seeds specified, uses default behavior
const prompt2 = `[{"prompt":"hello"},{"prompt":"hello world"}]`
// Example 3: Batch with same question but varied seeds, and different prompts with a seed range
const prompt3 = `[{"prompt":"what's the best fruit", "seed":1},{"prompt":"what's the best fruit", "seed":2},{"prompt":"kiwi"},{"prompt":"dog","seed_range":[1,2]}]`
// Example 4: Batch with identical prompt but different seeds, plus a seed range for a specific prompt
const prompt4 = `[{"prompt":"apple", "seed":1}, {"prompt":"apple", "seed":2}, {"prompt":"banana"}, {"prompt":"cat", "seed_range": [1,100] }]`Scalable machine learning (ML) inference onchain
ORA Terms of Use
ssh-keygen -t rsa -b 4096 -C "[email protected]"

docker build -t ubuntu-opml-dev .

git clone git@github.com:OPML-Labs/mlgo.git

cd mlgo

pip install -r requirements.txt

cd examples/mnist

python3 convert-h5-to-ggml.py models/mnist/mnist-small.state_dict

cd ../mnist_mips && ./build.sh

# How to run instructions:
# 1. Generate ssh command: ssh-keygen -t rsa -b 4096 -C "[email protected]"
# - Save the key in local repo where Dockerfile is placed as id_rsa
# - Add the public key to the GitHub account
# 2. Build docker image: docker build -t ubuntu-opml-dev .
# 3. Run the hardhat: docker run -it --rm --name ubuntu-opml-dev-container ubuntu-opml-dev bash -c "npx hardhat node"
# 4. Run the challenge script on the same container: docker exec -it ubuntu-opml-dev-container bash -c "./demo/challenge_simple.sh"
# Use an official Ubuntu as a parent image
FROM ubuntu:22.04

# Set environment variables to non-interactive to avoid prompts during package installations
ENV DEBIAN_FRONTEND=noninteractive

# Update the package list and install build/runtime dependencies in a single
# layer, then clean the apt cache to keep the image small.
RUN apt-get update && apt-get install -y \
    build-essential \
    cmake \
    git \
    golang \
    wget \
    curl \
    python3 \
    python3-pip \
    python3-venv \
    unzip \
    file \
    openssh-client \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Install Node.js 18.x and npm (required to run the hardhat node)
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \
    apt-get install -y nodejs

# Copy SSH keys into the container
# NOTE(review): copying a private key into an image layer exposes it to anyone
# who can pull the image; prefer BuildKit's `--mount=type=ssh` outside of demos.
COPY id_rsa /root/.ssh/id_rsa
RUN chmod 600 /root/.ssh/id_rsa

# Configure SSH to skip host key verification (demo convenience only)
RUN echo "Host *\n\tStrictHostKeyChecking no\n" >> /root/.ssh/config

# Set the working directory
WORKDIR /root

# Clone the OPML repository (with submodules) over SSH
RUN git clone git@github.com:ora-io/opml.git --recursive
WORKDIR /root/opml

# Build the OPML project
RUN make build

# Change permission for the challenge script
RUN chmod +x demo/challenge_simple.sh

# Default command
CMD ["bash"]

| Model Name | Price per Call in $ORA |
|---------------------------------------------|------------------------|
| meta-llama/Llama-3.3-70B-Instruct | 0.18 |
| Qwen/QwQ-32B-Preview | 0.24 |
| Qwen/Qwen2.5-Coder-32B-Instruct | 0.16 |
| meta-llama/Llama-3.2-3B-Instruct | 0.01 |
| mistralai/Mixtral-8x22B-Instruct-v0.1 | 0.24 |
| meta-llama/Meta-Llama-3-70B-Instruct | 0.18 |
| Qwen/Qwen2-72B-Instruct | 0.18 |
| google/gemma-2-27b-it | 0.16 |
| google/gemma-2-9b-it | 0.06 |
| mistralai/Mistral-7B-Instruct-v0.3 | 0.04 |
| google/gemma-2b-it | 0.02 |
| mistralai/Mistral-7B-Instruct-v0.2 | 0.04 |
| mistralai/Mixtral-8x7B-Instruct-v0.1 | 0.12 |
| mistralai/Mistral-7B-Instruct-v0.1 | 0.04 |
| meta-llama/Llama-2-13b-chat-hf | 0.04 |
| meta-llama/Llama-2-7b-chat-hf | 0.04 |
| meta-llama/Llama-3.1-405B-Instruct | 0.7 |
| Qwen/Qwen2.5-72B-Instruct | 0.24 |
| meta-llama/Llama-3.2-1B-Instruct | 0.01 |
| meta-llama/Meta-Llama-3-8B-Instruct | 0.04 |
| black-forest-labs/FLUX.1-dev | 0.09 |
| black-forest-labs/FLUX.1-canny | 0.09 |
| black-forest-labs/FLUX.1-redux-dev | 0.09 |
| black-forest-labs/FLUX.1-schnell | 0.01 |
| deepseek-ai/DeepSeek-V3 | 0.25 |
| stabilityai/stable-diffusion-3.5-medium | 0.13 |
| stabilityai/stable-diffusion-3-medium | 0.13 |
| stabilityai/stable-diffusion-3.5-large | 0.24 |
| stabilityai/stable-diffusion-3.5-large-turbo| 0.15 |// solidity
function calcModelIdByName(string calldata modelName) public pure returns (uint256) {
return uint256(uint160(uint256(keccak256(bytes(modelName)))));
}// JavaScript
// example modelIdString: 'openai/gpt-4o'
function modelIdStringToBigInt(modelIdString) {
const hashedValue = ethers.keccak256(ethers.toUtf8Bytes(modelIdString));
const addressValue = `0x${hashedValue.slice(26)}`;
return BigInt(addressValue);
}docker run -it --rm --name ubuntu-opml-dev-container ubuntu-opml-dev bash -c "npx hardhat node"docker exec -it ubuntu-opml-dev-container bash -c "./demo/challenge_simple.sh"curl -X POST "https://api.ora.io/v1/chat/completions" \
-H "Authorization: Bearer $ORA_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"model": "deepseek-ai/DeepSeek-V3",
"messages": [{"role": "user", "content": "What are some fun things to do in New York?"}]
}'{
"object": "chat.completion",
"model": "deepseek-reasoner",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": "The word \"strawberry\" is spelled as **S-T-R-A-W-B-E-R-R-Y**...",
"reasoning_content": "Okay, so the user is asking..."
},
"logprobs": null,
"finish_reason": "stop"
}
],
"usage": {
"prompt_tokens": 15,
"completion_tokens": 1149,
"total_tokens": 1164,
"completion_tokens_details": {
"reasoning_tokens": 1024
}
},
"requestHash": "309904099dc718859f46bea07078214bff25b812440d7f12fcab5af98e10a4c5",
"responseHash": "ecc1542ec60277e8e6a6ece5882b084a7f4ea71f5cc14b26471930814e1f8776"
}{
"object": "chat.completion",
"model": "deepseek-chat",
// choices, usage, requestHash, responseHash
"search_result": [
{
"url": "http://www.espn.com/nfl/superbowl/history/winners",
"context": "View a comprehensive list of every single NFL Super Bowl champion from 1967 to present on ESPN. Includes the finals opponent, site of the game and the final ...",
"source_from": "Google"
},
{
"url": "https://blog.ticketmaster.com/super-bowl-winners/",
"context": "The Kansas City Chiefs defeated the Philadelphia Eagles to win 38–35, led by a stellar performance from quarterback Patrick Mahomes.",
"source_from": "Google"
},
{
"url": "https://nflplayoffpass.com/super-bowl-winners/",
"context": "Explore a comprehensive list of Super Bowl winners from 1967 to 2025, including the latest champions and Super Bowl LIX game results. ... Super Bowl Year Winner Opposition Score Stadium; LIX: 2025: Philadelphia Eagles: Kansas City Chiefs: 40-22: Caesars Superdome: LVIII: 2024: Kansas City Chiefs: San Francisco 49ers:",
"source_from": "DuckDuckGo"
},
{
"url": "https://en.wikipedia.org/wiki/List_of_Super_Bowl_champions",
"context": "The Packers defeated the Chiefs in the first AFL-NFL World Championship Game (Super Bowl I).. The Super Bowl is the annual American football game that determines the champion of the National Football League (NFL). The game culminates a season that begins in the previous calendar year, and is the conclusion of the NFL playoffs.The winner receives the Vince Lombardi Trophy.",
"source_from": "DuckDuckGo"
}
],
}curl -X POST "https://api.ora.io/v1/images/generations" \
-H "Authorization: Bearer $ORA_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"model": "black-forest-labs/FLUX.1-dev",
"prompt": "Cats eating popcorn",
"steps": 10,
"n": 4
}'{
"model": "black-forest-labs/FLUX.1-dev",
"object": "list",
"data": [
{
"index": 0,
"url": "https://gateway.pinata.cloud/ipfs/QmQBv5cDB12fsYuwYC6VNHL2o8hLEzs5XuPaCH1PT3s5qT"
}
],
"requestHash": "81112e1d9b3c7a547d5c5623218e5ceccc21ebe367b6ca096541f63664847056",
"responseHash": "5eb34cd24096a77a2174ef870454fe47c6096c4d87a3d1559ab06c88624f4a94"
}# POST - generate video
curl -X POST "https://api.ora.io/v1/videos/generations" \
-H "Authorization: Bearer $ORA_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"model": "KumoAnonymous/KumoVideo-Turbo",
"prompt": "NYC Is A Great City",
"seed": 0
}'
# GET - get generation result
curl -X GET "https://api.ora.io/v1/videos/result/$generation_id" \
--header "Content-Type: application/json" \
--header "Authorization: Bearer $ORA_API_KEY"// POST - generate video
{
"task_id": "49aaa1dd-2c32-4bb3-a5b0-b2c887825b86",
"hash_val": "6a042e860b658f973ab49b22ea565b4e19750fe7023ef23fcdef7b0960c63050",
"status": "PROCESSING",
"content": "",
"requestHash": "82a1fa2d3edd3557ca2dddcfdef50dff5c9a19fa6c17bb2490aeee110b8b29d7",
"responseHash": "d52de3784e1603d5977a5d522fafdefd3970bfd82b344bc59b54ee543502deb8"
}
// GET - get generation result
{
"task_id": "1d73902d-cb58-4d4e-bb40-eb38bc13739a",
"hash_val": "6a042e860b658f973ab49b22ea565b4e19750fe7023ef23fcdef7b0960c63050",
"status": "OK",
"video": "https://gateway.pinata.cloud/ipfs/QmUSZ3g5tQCYP38gukE73SFMKGEge2gNoVyDjXBiAKNqJy"
}import openai
# Define your query
system_content = "You are a helpful assistant."
user_content = "What are some fun things to do in New York?"
# Set your ORA API key
ORA_API_KEY = "YOUR_ORA_API_KEY"
# Initialize the client
client = openai.OpenAI(
api_key=ORA_API_KEY,
base_url="https://api.ora.io/v1",
)
# Perform a chat completion
chat_completion = client.chat.completions.create(
model="deepseek-ai/DeepSeek-V3",
messages=[
{"role": "system", "content": system_content},
{"role": "user", "content": user_content},
]
)
# Print the response
response = chat_completion.choices[0].message.content
print("Response:\n", response)source .envgit clone -b OAO_interaction_tutorial [email protected]:ora-io/Interaction_With_OAO_Template.git --recursivecd Interaction_With_OAO_Templatecp .env.example .envforge installimport "OAO/contracts/interfaces/IAIOracle.sol";
import "OAO/contracts/AIOracleCallbackReceiver.sol";constructor(IAIOracle _aiOracle) AIOracleCallbackReceiver(_aiOracle){}function calculateAIResult(uint256 modelId, string calldata prompt) payable external {
bytes memory input = bytes(prompt);
bytes memory callbackData = bytes("");
address callbackAddress = address(this);
uint256 requestId = aiOracle.requestCallback{value: msg.value}(
modelId, input, callbackAddress, callbackGasLimit[modelId], callbackData
);
}address owner;
modifier onlyOwner() {
require(msg.sender == owner, "Only owner");
_;
}
mapping(uint256 => uint64) public callbackGasLimit;
function setCallbackGasLimit(uint256 modelId, uint64 gasLimit) external onlyOwner {
callbackGasLimit[modelId] = gasLimit;
}
constructor(IAIOracle _aiOracle) AIOracleCallbackReceiver(_aiOracle) {
owner = msg.sender;
callbackGasLimit[50] = 500_000; // Stable-Diffusion
callbackGasLimit[11] = 5_000_000; // Llama
}event promptRequest(
uint256 requestId,
address sender,
uint256 modelId,
string prompt
);
struct AIOracleRequest {
address sender;
uint256 modelId;
bytes input;
bytes output;
}
mapping(uint256 => AIOracleRequest) public requests;
function calculateAIResult(uint256 modelId, string calldata prompt) payable external {
bytes memory input = bytes(prompt);
bytes memory callbackData = bytes("");
address callbackAddress = address(this);
uint256 requestId = aiOracle.requestCallback{value: msg.value}(
modelId, input, callbackAddress, callbackGasLimit[modelId], callbackData
);
AIOracleRequest storage request = requests[requestId];
request.input = input;
request.sender = msg.sender;
request.modelId = modelId;
emit promptRequest(requestId, msg.sender, modelId, prompt);
}event promptsUpdated(
uint256 requestId,
uint256 modelId,
string input,
string output,
bytes callbackData
);
mapping(uint256 => mapping(string => string)) public prompts;
function getAIResult(uint256 modelId, string calldata prompt) external view returns (string memory) {
return prompts[modelId][prompt];
}
function aiOracleCallback(uint256 requestId, bytes calldata output, bytes calldata callbackData) external override onlyAIOracleCallback() {
AIOracleRequest storage request = requests[requestId];
require(request.sender != address(0), "request does not exist");
request.output = output;
prompts[request.modelId][string(request.input)] = string(output);
emit promptsUpdated(requestId, request.modelId, string(request.input), string(output), callbackData);
}function estimateFee(uint256 modelId) public view returns (uint256) {
return aiOracle.estimateFee(modelId, callbackGasLimit[modelId]);
}// SPDX-License-Identifier: MIT
pragma solidity ^0.8.13;
import "OAO/contracts/interfaces/IAIOracle.sol";
import "OAO/contracts/AIOracleCallbackReceiver.sol";
/// @notice Sample OAO consumer contract: forwards prompts to the AI Oracle and
/// stores delivered inference results on-chain, keyed by (modelId, prompt).
/// @dev Inherits the aiOracle reference and the onlyAIOracleCallback modifier
/// from AIOracleCallbackReceiver.
contract Prompt is AIOracleCallbackReceiver {
/// @notice Emitted when the oracle callback delivers an inference result.
event promptsUpdated(
uint256 requestId,
uint256 modelId,
string input,
string output,
bytes callbackData
);
/// @notice Emitted when a new inference request is submitted to the oracle.
event promptRequest(
uint256 requestId,
address sender,
uint256 modelId,
string prompt
);
// Bookkeeping for one oracle request.
struct AIOracleRequest {
address sender; // account that initiated the request
uint256 modelId; // model the request was sent to
bytes input; // raw prompt bytes
bytes output; // result, filled in by aiOracleCallback
}
// Deployer; the only account allowed to tune callback gas limits.
address owner;
modifier onlyOwner() {
require(msg.sender == owner, "Only owner");
_;
}
// requestId => request bookkeeping (populated by calculateAIResult)
mapping(uint256 => AIOracleRequest) public requests;
// modelId => gas limit forwarded to the oracle for the callback transaction
mapping(uint256 => uint64) public callbackGasLimit;
constructor(IAIOracle _aiOracle) AIOracleCallbackReceiver(_aiOracle) {
owner = msg.sender;
// Defaults for the two models used in this tutorial.
callbackGasLimit[50] = 500_000; // Stable-Diffusion
callbackGasLimit[11] = 5_000_000; // Llama
}
/// @notice Update the callback gas limit for a model. Owner only.
function setCallbackGasLimit(uint256 modelId, uint64 gasLimit) external onlyOwner {
callbackGasLimit[modelId] = gasLimit;
}
// modelId => prompt string => latest result string
mapping(uint256 => mapping(string => string)) public prompts;
/// @notice Read the stored result for (modelId, prompt); empty string if none yet.
function getAIResult(uint256 modelId, string calldata prompt) external view returns (string memory) {
return prompts[modelId][prompt];
}
/// @notice Called by the AI Oracle (enforced by onlyAIOracleCallback) to deliver a result.
/// @dev Reverts if requestId was never recorded by calculateAIResult.
function aiOracleCallback(uint256 requestId, bytes calldata output, bytes calldata callbackData) external override onlyAIOracleCallback() {
AIOracleRequest storage request = requests[requestId];
require(request.sender != address(0), "request does not exist");
request.output = output;
prompts[request.modelId][string(request.input)] = string(output);
emit promptsUpdated(requestId, request.modelId, string(request.input), string(output), callbackData);
}
/// @notice Fee (in wei) the oracle charges for modelId at the configured callback gas limit.
function estimateFee(uint256 modelId) public view returns (uint256) {
return aiOracle.estimateFee(modelId, callbackGasLimit[modelId]);
}
/// @notice Submit an inference request; msg.value must cover estimateFee(modelId).
/// @dev The oracle later invokes aiOracleCallback with the result for this request.
function calculateAIResult(uint256 modelId, string calldata prompt) payable external {
bytes memory input = bytes(prompt);
bytes memory callbackData = bytes("");
address callbackAddress = address(this);
uint256 requestId = aiOracle.requestCallback{value: msg.value}(
modelId, input, callbackAddress, callbackGasLimit[modelId], callbackData
);
AIOracleRequest storage request = requests[requestId];
request.input = input;
request.sender = msg.sender;
request.modelId = modelId;
emit promptRequest(requestId, msg.sender, modelId, prompt);
}
}
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.13;
import {Script} from "forge-std/Script.sol";
import {Prompt} from "../src/Prompt.sol";
import {IAIOracle} from "OAO/contracts/interfaces/IAIOracle.sol";
contract PromptScript is Script {
address OAO_PROXY;
function setUp() public {
OAO_PROXY = [OAO_PROXY_address_here];
}
function run() public {
uint privateKey = vm.envUint("PRIVATE_KEY");
vm.startBroadcast(privateKey);
new Prompt(IAIOracle(OAO_PROXY));
vm.stopBroadcast();
}
}forge script script/Prompt.s.sol --rpc-url $RPC_URL --broadcast --verify --etherscan-api-key $ETHERSCAN_KEY






