Policy upload to Filebase/IPFS #639

Open · wants to merge 10 commits into base: master
4 changes: 3 additions & 1 deletion contracts/package.json
@@ -69,6 +69,8 @@
"typescript": "^4.6.4"
},
"dependencies": {
"@kleros/vea-contracts": "^0.1.12"
"@kleros/vea-contracts": "^0.1.12",
"aws-sdk": "^2.1329.0",
"uuid": "^9.0.0"
}
}
117 changes: 117 additions & 0 deletions contracts/scripts/policyUpdate.ts
@@ -0,0 +1,117 @@
import { deployments, getNamedAccounts, getChainId, ethers } from "hardhat";
import { PolicyRegistry } from "../typechain-types";
import fs from "fs";
const AWS = require("aws-sdk");
const { v4: uuidv4 } = require("uuid");
const id = uuidv4();
const currentDate = new Date();
const formattedDate = `${currentDate.getFullYear()}/${(currentDate.getMonth() + 1)
.toString()
.padStart(2, "0")}/${currentDate.getDate().toString().padStart(2, "0")}`;
const S3_PATH = formattedDate + "/" + id + "/";

const s3 = new AWS.S3({
endpoint: "https://s3.filebase.com",
region: "us-east-1",
signatureVersion: "v4",
accessKeyId: process.env.FILEBASE_ACCESS_KEY,
secretAccessKey: process.env.FILEBASE_SECRET_KEY,
});
enum HomeChains {
ARBITRUM_ONE = 42161,
ARBITRUM_RINKEBY = 421611,
ARBITRUM_GOERLI = 421613,
HARDHAT = 31337,
}
async function main(filePath: string) {
let courtsV1;
fs.readFile(filePath, "utf8", (err, jsonString) => {
if (err) {
console.log("File read failed:", err);
return;
}
const json = JSON.parse(jsonString);
courtsV1 = json.map((courtDetails) => ({
...courtDetails,
Contributor:
What's the format of courtDetails? If you are destructuring it, there is no need to repeat `name: courtDetails.name`, as it will already be assigned.

Contributor Author:
Removed the extra step of destructuring.
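
For illustration, a minimal sketch of the reviewer's point; the CourtDetails interface below is an assumption inferred from the fields this script reads, not taken from the actual policies JSON:

// Hypothetical shape of one entry in the policies file (assumption, inferred from usage).
interface CourtDetails {
  name: string;
  description: string;
  summary: string;
  court: number;
  uri: string;
}

// Spreading the object already copies every own enumerable property...
const viaSpread = (c: CourtDetails) => ({ ...c });

// ...so listing each field again on top of the spread is redundant:
const viaExplicitFields = (c: CourtDetails) => ({
  ...c,
  name: c.name,
  description: c.description,
  summary: c.summary,
  court: c.court,
  uri: c.uri,
});
// Both functions return objects with identical contents.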

name: courtDetails.name,
description: courtDetails.description,
summary: courtDetails.summary,
court: courtDetails.court,
uri: courtDetails.uri,
}));
});

// fallback to hardhat node signers on local network
// const governor = (await getNamedAccounts()).governor ?? (await ethers.getSigners())[0].address;
const governor = (await ethers.getSigners())[0];
const chainId = Number(await getChainId());
if (!HomeChains[chainId]) {
console.error(`Aborting: script is not compatible with ${chainId}`);
return;
} else {
console.log("deploying to %s with deployer %s", HomeChains[chainId], governor);
}

//--------uncomment once configuration is set in deployments------
// const policyRegistryDeployment = await deployments.get("PolicyRegistry");
const policyRegistry = (await ethers.getContractAt(
"PolicyRegistry",
"0xAF0F49Fe110b48bd512F00d51D141F023c9a9106" // arbitrumgoerli contract address
// policyRegistryDeployment.address
)) as PolicyRegistry;
for (const courtObject of courtsV1) {
var courtV2 = courtObject.court + 1;
var filename = courtObject.name.replace(" ", "-").concat(".json");
const data = { name: courtObject.name, description: courtObject.description, summary: courtObject.summary };
let response = await uploadFormDataToIPFS(data, filename);
console.log(response);

if (response && response.statusCode === 200) {
try {
console.log(courtV2, courtObject.name);
const data = await JSON.parse(response.body);
const cid = "/ipfs/" + data.message.Metadata.cid;
console.log(cid, "cid");
await policyRegistry.connect(governor).setPolicy(courtV2, courtObject.name, cid);
} catch (error) {
console.log(error);
}
}
}
}

const uploadFormDataToIPFS = async (data, filename) => {
try {
const params = {
Bucket: process.env.FILEBASE_BUCKET_NAME,
Key: S3_PATH + filename,
ContentType: "application/json",
Body: Buffer.from(JSON.stringify(data)),
};
const request = await s3.upload(params).promise();

const head_params = {
Bucket: process.env.FILEBASE_BUCKET_NAME,
Key: request.key,
};
const head = await s3.headObject(head_params).promise();

return {
statusCode: 200,
body: JSON.stringify({ message: head }),
};
} catch (error) {
console.log(error);

return {
statusCode: 500,
body: JSON.stringify({ message: error }),
};
}
};
main("./config/policies.v1.mainnet.json")
.then(() => process.exit(0))
.catch((error) => {
console.error(error);
process.exit(1);
});
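
A usage note, since the diff does not add an npm script for this file: as a Hardhat script it would presumably be run with something like `npx hardhat run scripts/policyUpdate.ts --network <network>` from the contracts workspace, with FILEBASE_ACCESS_KEY, FILEBASE_SECRET_KEY and FILEBASE_BUCKET_NAME exported in the environment; the exact network name and invocation are assumptions, not taken from this PR.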
169 changes: 145 additions & 24 deletions web/netlify/functions/uploadToIPFS.ts
@@ -1,31 +1,152 @@
import { Handler } from "@netlify/functions";
import fetch from "node-fetch";

const ESTUARI_API_KEY = process.env["ESTUARY_API_KEY"];
const ESTUARI_URL = process.env["ESTUARY_GATEWAY"];

export const handler: Handler = async (event, context) => {
context.callbackWaitsForEmptyEventLoop = false;
if (event.body) {
const newHeaders = event.headers;
delete newHeaders.host;
const response = await fetch(ESTUARI_URL, {
method: "POST",
headers: {
Authorization: `Bearer ${ESTUARI_API_KEY}`,
...newHeaders,
},
body: Buffer.from(event.body, "base64"),
});
import { Handler, HandlerEvent } from "@netlify/functions";
import AWS from "aws-sdk";
import { v4 as uuidv4 } from "uuid";
import amqp from "amqplib";
import busboy from "busboy";

const envVariables = {
accessKey: process.env.FILEBASE_ACCESS_KEY,
secretKey: process.env.FILEBASE_SECRET_KEY,
bucketName: process.env.FILEBASE_BUCKET_NAME,
rabbitMQURL: process.env.RABBITMQ_URL,
};

const parsedResponse = await response.json();
const s3 = new AWS.S3({
endpoint: "https://s3.filebase.com",
region: "us-east-1",
signatureVersion: "v4",
accessKeyId: envVariables.accessKey,
secretAccessKey: envVariables.secretKey,
});

export const handler: Handler = async function (event) {
if (!validEnvVariables()) {
return {
statusCode: 500,
body: JSON.stringify({ message: "Env variables missing" }),
};
}
if (!event.body) {
return {
statusCode: response.status,
body: JSON.stringify(parsedResponse),
statusCode: 500,
body: JSON.stringify({ message: "Invalid body format" }),
};
}

const file = parseMultiPartData(event.body, event.headers);
const s3Key = await uploadToS3(file["name"], file["parts"]);
const cid = await getCID(s3Key);
console.log(cid);
await rabbitMQUpload(cid);

return {
statusCode: 500,
body: JSON.stringify({ message: "Invalid body format" }),
statusCode: 200,
body: JSON.stringify({ message: cid }),
};
};

interface IFile {
name: string;
parts: Buffer[];
}

const parseMultiPartData = (
body: string,
headers: HandlerEvent["headers"]
): IFile => {
const file: IFile = {
name: "",
parts: [],
};

const bb = busboy({ headers });

bb.on("file", (_, filestream, metadata) => {
file.name = metadata.filename;
filestream.on("data", (data) => {
file.parts.push(data);
});
});

bb.write(Buffer.from(body, "base64"));

return file;
};

const validEnvVariables = (): boolean => {
return Object.values(envVariables).reduce(
(acc, current) => acc && typeof current !== "undefined",
true
);
};

interface IUploadedPart {
ETag: string;
PartNumber: number;
}

const uploadToS3 = async (name: string, parts: Buffer[]) => {
const multipartInfo: AWS.S3.CreateMultipartUploadRequest = {
Bucket: envVariables.bucketName!,
Key: generateS3Path() + name,
};
const uploadID = await s3
.createMultipartUpload(multipartInfo)
.promise()
.then((result) => result.UploadId);
const uploadedParts: IUploadedPart[] = [];
for (const [i, part] of parts.entries()) {
const partNumber = i + 1;
const partInfo: AWS.S3.UploadPartRequest = {
...multipartInfo,
UploadId: uploadID!,
Body: part,
PartNumber: partNumber,
};
const test = await s3.uploadPart(partInfo).promise();
uploadedParts.push({
ETag: test.ETag!,
PartNumber: partNumber,
});
}
const completeMultipartUploadParams: AWS.S3.CompleteMultipartUploadRequest = {
...multipartInfo,
MultipartUpload: {
Parts: uploadedParts,
},
UploadId: uploadID!,
};
await s3.completeMultipartUpload(completeMultipartUploadParams).promise();

return multipartInfo.Key;
};

const getCID = async (key: string) => {
const headParams: AWS.S3.HeadObjectRequest = {
Bucket: envVariables.bucketName!,
Key: key,
};
const head = await s3.headObject(headParams).promise();

return head.Metadata?.cid;
};

const generateS3Path = (): string => {
const currentDate = new Date();
const formattedDate = currentDate
.toISOString()
.slice(0, 10)
.replace(/-/g, "/");
const id = uuidv4();
return `${formattedDate}/${id}/`;
};

const rabbitMQUpload = async (cid: any) => {
const conn = await amqp.connect(envVariables.rabbitMQURL!);
const channel = await conn.createChannel();
const exchange = "filebase";
await channel.assertExchange(exchange, "fanout", { durable: true });
channel.publish(exchange, "", Buffer.from(cid));
await channel.close();
await conn.close();
};
5 changes: 5 additions & 0 deletions web/package.json
@@ -35,9 +35,11 @@
"@netlify/functions": "^1.4.0",
"@parcel/transformer-svg-react": "~2.7.0",
"@parcel/watcher": "~2.0.0",
"@types/amqplib": "^0.10.1",
"@types/react": "^18.0.25",
"@types/react-dom": "^18.0.9",
"@types/styled-components": "^5.1.26",
"@types/uuid": "^9.0.1",
"@typescript-eslint/eslint-plugin": "^5.53.0",
"@typescript-eslint/parser": "^5.53.0",
"@typescript-eslint/utils": "^5.53.0",
@@ -62,10 +64,13 @@
"@kleros/ui-components-library": "^1.9.2",
"@sentry/react": "^7.37.2",
"@sentry/tracing": "^7.37.2",
"@types/busboy": "^1.5.0",
"@types/react-modal": "^3.13.1",
"@web3-react/core": "^6.1.9",
"@web3-react/injected-connector": "^6.0.7",
"@web3-react/types": "^6.0.7",
"amqplib": "^0.10.3",
"busboy": "^1.6.0",
"chart.js": "^3.9.1",
"chartjs-adapter-moment": "^1.0.0",
"core-js": "^3.21.1",
@@ -43,7 +43,7 @@ const SubmitEvidenceModal: React.FC<{
.then(async (res) => {
const response = await res.json();
if (res.status === 200) {
const cid = "/ipfs/" + response["cid"];
const cid = "/ipfs/" + response["message"];
await wrapWithToast(
disputeKit.submitEvidence(evidenceGroup, cid)
).then(() => {
2 changes: 1 addition & 1 deletion web/src/utils/uploadFormDataToIPFS.ts
@@ -15,7 +15,7 @@ export function uploadFormDataToIPFS(formData: FormData): Promise<Response> {
}).then(async (response) =>
response.status === 200
? resolve(response)
: reject({ message: (await response.json()).error.reason })
: reject({ message: (await response.json()).message })
)
),
{