Policy upload to Filebase/IPFS #639

Open
wants to merge 10 commits into base: master
fix(in progress): changed upload to ipfs netlify function to support filebase
Params10 committed Mar 8, 2023
commit 564159be50ee01d375c3aac647aeb490872de135
68 changes: 48 additions & 20 deletions web/netlify/functions/uploadToIPFS.ts
@@ -1,31 +1,59 @@
-import { Handler } from "@netlify/functions";
-import fetch from "node-fetch";
+const AWS = require("aws-sdk");
+const { v4: uuidv4 } = require("uuid");
+const id = uuidv4();
+const currentDate = new Date();
+const formattedDate = `${currentDate.getFullYear()}/${(
+  currentDate.getMonth() + 1
+)
+  .toString()
+  .padStart(2, "0")}/${currentDate.getDate().toString().padStart(2, "0")}`;
+const S3_PATH = formattedDate + "/" + id + "/";
+const amqp = require("amqplib");

-const ESTUARI_API_KEY = process.env["ESTUARY_API_KEY"];
-const ESTUARI_URL = process.env["ESTUARY_GATEWAY"];
+const s3 = new AWS.S3({
+  endpoint: "https://s3.filebase.com",
+  region: "us-east-1",
+  signatureVersion: "v4",
+  accessKeyId: process.env.FILEBASE_ACCESS_KEY,
+  secretAccessKey: process.env.FILEBASE_SECRET_KEY,
+});

-export const handler: Handler = async (event, context) => {
-  context.callbackWaitsForEmptyEventLoop = false;
+export const handler = async function (event: any, context: any) {
   if (event.body) {
-    const newHeaders = event.headers;
-    delete newHeaders.host;
-    const response = await fetch(ESTUARI_URL, {
-      method: "POST",
-      headers: {
-        Authorization: `Bearer ${ESTUARI_API_KEY}`,
-        ...newHeaders,
-      },
-      body: Buffer.from(event.body, "base64"),
-    });
-
-    const parsedResponse = await response.json();
+    const params = {
+      Bucket: process.env.FILEBASE_BUCKET_NAME,
+      Key: S3_PATH + event.headers["file-name"],
+      ContentType: "text/plain",
+      Body: event["body"],
+    };
+    const request = await s3.upload(params).promise();
+    const head_params = {
+      Bucket: process.env.FILEBASE_BUCKET_NAME,
+      Key: request.key,
+    };
+    const head = await s3.headObject(head_params).promise();
+    await rabbitMQUpload(head.Metadata.cid);
     return {
-      statusCode: response.status,
-      body: JSON.stringify(parsedResponse),
+      statusCode: 200,
+      body: JSON.stringify({ message: head.Metadata.cid }),
     };
   }
   return {
     statusCode: 500,
     body: JSON.stringify({ message: "Invalid body format" }),
   };
 };
+
+const rabbitMQUpload = async (cid: any) => {
+  // Connect to RabbitMQ
+  const conn = await amqp.connect(process.env.RABBITMQ_URL);
+  const channel = await conn.createChannel();
+  const exchange = "filebase";
+  await channel.assertExchange(exchange, "fanout", { durable: true });
+  // Publish the IPFS CID to the exchange
+  channel.publish(exchange, "", Buffer.from(cid));
+  console.log(`Sent IPFS CID ${cid} to exchange ${exchange}`);
+  // Close the connection and return success
+  await channel.close();
+  await conn.close();
+};
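For reference, the handler above keys the Filebase object on a date/uuid prefix plus the caller-supplied file-name header and returns the resulting IPFS CID in the response body. A minimal sketch of how a client could call the deployed function follows; the endpoint path, the uploadPolicy name, and the payload are illustrative assumptions, not something defined in this commit.

```ts
// Hypothetical caller for the uploadToIPFS function (names and endpoint are assumptions).
const uploadPolicy = async (fileName: string, contents: string): Promise<string> => {
  const response = await fetch("/.netlify/functions/uploadToIPFS", {
    method: "POST",
    headers: {
      "Content-Type": "text/plain",
      // Used by the handler to build the S3 key: <yyyy/mm>/<dd>/<uuid>/<file-name>
      "file-name": fileName,
    },
    body: contents,
  });
  const parsed = (await response.json()) as { message: string };
  // On success the handler responds with { message: <IPFS CID> }.
  return parsed.message;
};

// Example usage (hypothetical):
// const cid = await uploadPolicy("policy.json", JSON.stringify({ name: "Court policy" }));
```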
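The new rabbitMQUpload helper publishes each CID to a durable fanout exchange named "filebase", so any downstream service (for example a pinning or indexing worker) can react to uploads. A minimal amqplib consumer sketch is below, assuming the same RABBITMQ_URL environment variable; the queue setup and logging are illustrative and not part of this PR.

```ts
const amqp = require("amqplib");

// Illustrative consumer for the "filebase" fanout exchange published to above.
const consumeCids = async () => {
  const conn = await amqp.connect(process.env.RABBITMQ_URL);
  const channel = await conn.createChannel();
  const exchange = "filebase";
  // Must match the publisher's assertion (fanout, durable).
  await channel.assertExchange(exchange, "fanout", { durable: true });

  // Fanout exchanges ignore routing keys; bind a server-named exclusive queue.
  const { queue } = await channel.assertQueue("", { exclusive: true });
  await channel.bindQueue(queue, exchange, "");

  await channel.consume(queue, (msg: any) => {
    if (msg !== null) {
      const cid = msg.content.toString();
      console.log(`Received IPFS CID ${cid}`); // e.g. hand off to a pinning step
      channel.ack(msg);
    }
  });
};

consumeCids().catch(console.error);
```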