Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Library functions internal #9

Open
wants to merge 7 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -102,4 +102,6 @@ lint/tmp/
gas-report.txt

contracts/test/fuzzing/crytic-export
temp
temp

yarn.lock
11 changes: 9 additions & 2 deletions contracts/DealStatus.sol
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,13 @@ contract DealStatus is IAggregatorOracle, Proof {
transactionId = 0;
}

// Public test/integration entry point for the now-internal computeExpectedAuxData.
// The underlying implementation was changed to `internal` visibility in this change set,
// so this wrapper re-exposes it for external callers (e.g. tests) without altering logic.
// @param _proof         inclusion proof supplied by the aggregator
// @param _verifierData  data provided by the verifier (CID + piece size)
// @return the expected auxiliary data computed from the proof and verifier data
function computeExpectedAuxDataPublic(
InclusionProof memory _proof,
InclusionVerifierData memory _verifierData
) public pure returns (InclusionAuxData memory) {
return computeExpectedAuxData(_proof, _verifierData);
}

function submit(bytes memory _cid) external returns (uint256) {
// Increment the transaction ID
transactionId++;
Expand Down Expand Up @@ -67,7 +74,7 @@ contract DealStatus is IAggregatorOracle, Proof {
bytes memory cid = txIdToCid[_id];
for (uint256 i = 0; i < cidToDeals[cid].length; i++) {
if (cidToDeals[cid][i].dealId == _dealId) {
return this.computeExpectedAuxData(_proof, _verifierData);
return this.computeExpectedAuxDataPublic(_proof, _verifierData);
}
}

Expand All @@ -76,7 +83,7 @@ contract DealStatus is IAggregatorOracle, Proof {

// Perform validation logic
// return this.computeExpectedAuxDataWithDeal(_dealId, _proof, _verifierData);
return this.computeExpectedAuxData(_proof, _verifierData);
return this.computeExpectedAuxDataPublic(_proof, _verifierData);
}

// allDealIds should return all the deal ids created by the aggregator
Expand Down
13 changes: 8 additions & 5 deletions contracts/data-segment/Cid.sol
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,14 @@ import "./Const.sol";

library Cid {
// cidToPieceCommitment converts a CID to a piece commitment.
function cidToPieceCommitment(bytes memory _cb) public pure returns (bytes32) {
require(_cb.length == CID_COMMP_HEADER_LENGTH + MERKLE_TREE_NODE_SIZE, "wrong length of CID");
function cidToPieceCommitment(bytes memory _cb) internal pure returns (bytes32) {
require(
keccak256(abi.encodePacked(_cb[0], _cb[1], _cb[2], _cb[3], _cb[4], _cb[5], _cb[6]))
== keccak256(abi.encodePacked(CID_COMMP_HEADER)),
_cb.length == CID_COMMP_HEADER_LENGTH + MERKLE_TREE_NODE_SIZE,
"wrong length of CID"
);
require(
keccak256(abi.encodePacked(_cb[0], _cb[1], _cb[2], _cb[3], _cb[4], _cb[5], _cb[6])) ==
keccak256(abi.encodePacked(CID_COMMP_HEADER)),
"wrong content of CID header"
);
bytes32 res;
Expand All @@ -22,7 +25,7 @@ library Cid {
}

// pieceCommitmentToCid converts a piece commitment to a CID.
function pieceCommitmentToCid(bytes32 _commp) public pure returns (bytes memory) {
// pieceCommitmentToCid converts a piece commitment to a CID by prefixing it
// with the constant CID header (CID_COMMP_HEADER, declared in Const.sol).
// @param _commp  raw 32-byte piece commitment
// @return the CID bytes: header followed by the commitment
function pieceCommitmentToCid(bytes32 _commp) internal pure returns (bytes memory) {
    return abi.encodePacked(CID_COMMP_HEADER, _commp);
}
Expand Down
84 changes: 55 additions & 29 deletions contracts/data-segment/Proof.sol
Original file line number Diff line number Diff line change
Expand Up @@ -15,28 +15,38 @@ contract Proof {
using Cid for bytes32;

// computeExpectedAuxData computes the expected auxiliary data given an inclusion proof and the data provided by the verifier.
function computeExpectedAuxData(InclusionProof memory ip, InclusionVerifierData memory verifierData)
public
pure
returns (InclusionAuxData memory)
{
require(isPow2(uint64(verifierData.sizePc)), "Size of piece provided by verifier is not power of two");
function computeExpectedAuxData(
InclusionProof memory ip,
InclusionVerifierData memory verifierData
) internal pure returns (InclusionAuxData memory) {
require(
isPow2(uint64(verifierData.sizePc)),
"Size of piece provided by verifier is not power of two"
);

bytes32 commPc = verifierData.commPc.cidToPieceCommitment();
bytes32 assumedCommPa = computeRoot(ip.proofSubtree, commPc);

(bool ok, uint64 assumedSizePa) =
checkedMultiply(uint64(1) << uint64(ip.proofSubtree.path.length), uint64(verifierData.sizePc));
(bool ok, uint64 assumedSizePa) = checkedMultiply(
uint64(1) << uint64(ip.proofSubtree.path.length),
uint64(verifierData.sizePc)
);
require(ok, "assumedSizePa overflow");

uint64 dataOffset = ip.proofSubtree.index * uint64(verifierData.sizePc);
SegmentDesc memory en = makeDataSegmentIndexEntry(Fr32(commPc), dataOffset, uint64(verifierData.sizePc));
SegmentDesc memory en = makeDataSegmentIndexEntry(
Fr32(commPc),
dataOffset,
uint64(verifierData.sizePc)
);
bytes32 enNode = truncatedHash(serialize(en));
bytes32 assumedCommPa2 = computeRoot(ip.proofIndex, enNode);
require(assumedCommPa == assumedCommPa2, "aggregator's data commitments don't match");

(bool ok2, uint64 assumedSizePa2) =
checkedMultiply(uint64(1) << uint64(ip.proofIndex.path.length), BYTES_IN_DATA_SEGMENT_ENTRY);
(bool ok2, uint64 assumedSizePa2) = checkedMultiply(
uint64(1) << uint64(ip.proofIndex.path.length),
BYTES_IN_DATA_SEGMENT_ENTRY
);
require(ok2, "assumedSizePau64 overflow");
require(assumedSizePa == assumedSizePa2, "aggregator's data size doesn't match");

Expand All @@ -50,34 +60,46 @@ contract Proof {
uint64 dealId,
InclusionProof memory ip,
InclusionVerifierData memory verifierData
) public returns (InclusionAuxData memory) {
) internal returns (InclusionAuxData memory) {
InclusionAuxData memory inclusionAuxData = computeExpectedAuxData(ip, verifierData);
validateInclusionAuxData(dealId, inclusionAuxData);
return inclusionAuxData;
}

// validateInclusionAuxData validates that the deal is activated and not terminated.
function validateInclusionAuxData(uint64 dealId, InclusionAuxData memory inclusionAuxData) internal {
function validateInclusionAuxData(
uint64 dealId,
InclusionAuxData memory inclusionAuxData
) internal {
// check that the deal is not terminated
MarketTypes.GetDealActivationReturn memory dealActivation = MarketAPI.getDealActivation(dealId);
MarketTypes.GetDealActivationReturn memory dealActivation = MarketAPI.getDealActivation(
dealId
);
require(dealActivation.terminated <= 0, "Deal is terminated");
require(dealActivation.activated > 0, "Deal is not activated");

MarketTypes.GetDealDataCommitmentReturn memory dealDataCommitment = MarketAPI.getDealDataCommitment(dealId);
require(keccak256(dealDataCommitment.data) == keccak256(inclusionAuxData.commPa), "Deal commD doesn't match");
MarketTypes.GetDealDataCommitmentReturn memory dealDataCommitment = MarketAPI
.getDealDataCommitment(dealId);
require(
keccak256(dealDataCommitment.data) == keccak256(inclusionAuxData.commPa),
"Deal commD doesn't match"
);
require(dealDataCommitment.size == inclusionAuxData.sizePa, "Deal size doesn't match");
}

// validateIndexEntry validates that the index entry is in the correct position in the index.
function validateIndexEntry(InclusionProof memory ip, uint64 assumedSizePa2) internal pure {
uint64 idxStart = indexAreaStart(assumedSizePa2);
(bool ok3, uint64 indexOffset) = checkedMultiply(ip.proofIndex.index, BYTES_IN_DATA_SEGMENT_ENTRY);
(bool ok3, uint64 indexOffset) = checkedMultiply(
ip.proofIndex.index,
BYTES_IN_DATA_SEGMENT_ENTRY
);
require(ok3, "indexOffset overflow");
require(indexOffset >= idxStart, "index entry at wrong position");
}

// computeRoot computes the root of a Merkle tree given a leaf and a Merkle proof.
function computeRoot(ProofData memory d, bytes32 subtree) public pure returns (bytes32) {
function computeRoot(ProofData memory d, bytes32 subtree) internal pure returns (bytes32) {
require(d.path.length < 64, "merkleproofs with depths greater than 63 are not supported");
require(d.index >> d.path.length == 0, "index greater than width of the tree");

Expand All @@ -98,7 +120,7 @@ contract Proof {
}

// computeNode computes the parent node of two child nodes
function computeNode(bytes32 left, bytes32 right) public pure returns (bytes32) {
// computeNode derives the parent node of two child nodes: sha256 over the
// concatenated children, then truncated via the sibling helper `truncate`.
// @param left   left child node
// @param right  right child node
// @return the truncated parent digest
function computeNode(bytes32 left, bytes32 right) internal pure returns (bytes32) {
    return truncate(sha256(abi.encodePacked(left, right)));
}
Expand Down Expand Up @@ -145,7 +167,11 @@ contract Proof {
}

// verify verifies that the given leaf is present in the merkle tree with the given root.
function verify(ProofData memory proof, bytes32 root, bytes32 leaf) public pure returns (bool) {
// verify checks membership of `leaf` in the Merkle tree rooted at `root`
// by recomputing the root from the proof and comparing for equality.
// @param proof  Merkle proof (path + index) for the leaf
// @param root   expected tree root
// @param leaf   leaf node being verified
// @return true iff the recomputed root equals `root`
function verify(
    ProofData memory proof,
    bytes32 root,
    bytes32 leaf
) internal pure returns (bool) {
    bytes32 recomputed = computeRoot(proof, leaf);
    return recomputed == root;
}

Expand All @@ -159,25 +185,25 @@ contract Proof {
}

// hashNode hashes the given node with the given left child.
function hashNode(bytes32 left, bytes32 right) public pure returns (bytes32) {
// hashNode hashes two sibling nodes into their parent: sha256 of the packed
// pair, masked with the TRUNCATOR constant (declared elsewhere in this file's
// imports) to clear the bits the commitment scheme discards.
// @param left   left child node
// @param right  right child node
// @return the masked parent digest
function hashNode(bytes32 left, bytes32 right) internal pure returns (bytes32) {
    return sha256(abi.encodePacked(left, right)) & TRUNCATOR;
}

// truncatedHash computes the truncated hash of the given data.
function truncatedHash(bytes memory data) public pure returns (bytes32) {
// truncatedHash computes sha256 over `data` and masks the digest with the
// TRUNCATOR constant, matching the node-hashing convention used elsewhere
// in this contract.
// @param data  arbitrary byte payload
// @return the masked 32-byte digest
function truncatedHash(bytes memory data) internal pure returns (bytes32) {
    return sha256(abi.encodePacked(data)) & TRUNCATOR;
}

// makeDataSegmentIndexEntry creates a new data segment index entry.
function makeDataSegmentIndexEntry(Fr32 memory commP, uint64 offset, uint64 size)
internal
pure
returns (SegmentDesc memory)
{
function makeDataSegmentIndexEntry(
Fr32 memory commP,
uint64 offset,
uint64 size
) internal pure returns (SegmentDesc memory) {
SegmentDesc memory en;
en.commDs = bytes32(commP.value);
en.offset = offset;
Expand All @@ -187,15 +213,15 @@ contract Proof {
}

// computeChecksum computes the checksum of the given segment description.
function computeChecksum(SegmentDesc memory _sd) public pure returns (bytes16) {
// computeChecksum derives a 16-byte checksum for a segment description:
// sha256 of the CBOR-free serialized form (see sibling `serialize`), with the
// top two bits of the final byte cleared by the mask, then narrowed to bytes16.
// @param _sd  segment description to checksum
// @return the 16-byte checksum
function computeChecksum(SegmentDesc memory _sd) internal pure returns (bytes16) {
    bytes32 digest = sha256(serialize(_sd));
    digest &= hex"ffffffffffffffffffffffffffffff3f";
    return bytes16(digest);
}

// serialize serializes the given segment description.
function serialize(SegmentDesc memory sd) public pure returns (bytes memory) {
function serialize(SegmentDesc memory sd) internal pure returns (bytes memory) {
bytes memory result = new bytes(ENTRY_SIZE);

// Pad commDs
Expand Down
71 changes: 32 additions & 39 deletions deploy/00_deploy.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,46 +8,39 @@ const wallet = new ethers.Wallet(private_key, ethers.provider)

module.exports = async ({ deployments }) => {
// ethers is available in the global scope
const [deployer] = await ethers.getSigners();
console.log(
"Deploying the contracts with the account:",
await deployer.getAddress()
);

console.log("Account balance:", (await deployer.getBalance()).toString());

const accounts = await ethers.getSigners();
//console.log(accounts[0])

console.log("Wallet Ethereum Address:", wallet.address);
const chainId = network.config.chainId;

//deploy DealStatus
const Cid = await ethers.getContractFactory('Cid', accounts[0]);
console.log('Deploying Cid...');
const cid = await Cid.deploy();
await cid.deployed()
console.log('Cid deployed to:', cid.address);

//deploy DealStatus
const Proof = await ethers.getContractFactory('Proof', {
libraries: {
Cid: cid.address,
},
});
console.log('Deploying Proof...');
const proof = await Proof.deploy();
await proof.deployed()
console.log('Proof deployed to:', proof.address);
const [deployer] = await ethers.getSigners()
console.log("Deploying the contracts with the account:", await deployer.getAddress())

console.log("Account balance:", (await deployer.getBalance()).toString())

const accounts = await ethers.getSigners()
console.log(accounts[0])

console.log("Wallet Ethereum Address:", wallet.address)
const chainId = network.config.chainId

// //deploy DealStatus
// const Cid = await ethers.getContractFactory('Cid', accounts[0]);
// console.log('Deploying Cid...');
// const cid = await Cid.deploy();
// await cid.deployed()
// console.log('Cid deployed to:', cid.address);

// //deploy DealStatus
// const Proof = await ethers.getContractFactory('Proof', {
// libraries: {
// Cid: cid.address,
// },
// });
// console.log('Deploying Proof...');
// const proof = await Proof.deploy();
// await proof.deployed()
// console.log('Proof deployed to:', proof.address);

//deploy DealStatus
const dealStatus = await ethers.getContractFactory('DealStatus', {
libraries: {
Cid: cid.address,
},
});
console.log('Deploying DealStatus...');
const dealstatus = await dealStatus.deploy();
const dealStatus = await ethers.getContractFactory("DealStatus", accounts[0])
console.log("Deploying DealStatus...")
const dealstatus = await dealStatus.deploy()
await dealstatus.deployed()
console.log('DealStatus deployed to:', dealstatus.address);
console.log("DealStatus deployed to:", dealstatus.address)
}
12 changes: 6 additions & 6 deletions hardhat.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,13 @@ module.exports = {
solidity: {
version: "0.8.17",
settings: {
optimizer: {
enabled: true,
runs: 1000,
details: { yul: false },
},
optimizer: {
enabled: true,
runs: 1000,
details: { yul: false },
},
},
},
},
defaultNetwork: "calibrationnet",
mocha: {
timeout: 100000000
Expand Down