arweave data version 1
ci/woodpecker/push/woodpecker Pipeline was successful
Podrobnosti
ci/woodpecker/push/woodpecker Pipeline was successful
Podrobnosti
This commit is contained in:
rodič
9ec1becacb
revize
d2703bebae
|
@ -9,5 +9,7 @@ The [IPFS](https://ipfs.io/) addresses of the major versions are stored directly
|
|||
* [ethbrno.eth.link](https://ethbrno.eth.link)
|
||||
* [ethbrno.eth.limo](https://ethbrno.eth.limo)
|
||||
|
||||
The site is configured to prerender all content and refresh the data from Arweave. The publishing script is named upload-data and requires your own Arweave wallet (the whole JSON string) as an AR parameter in the environment, like `AR={"kty":"RSA","n":....`. IPFS publishing is not prepared out of the box, but it basically takes the output of `/build` and sends it to an IPFS pinning service like Pinata. We are using a self-hosted server for this.
|
||||
|
||||
## Authors
|
||||
* vorcigernix
|
||||
* [vorcigernix](https://github.com/vorcigernix) & [tree](https://github.com/burningtree)
|
||||
|
|
Rozdílový obsah nebyl zobrazen, protože je příliš veliký
Načíst rozdílové porovnání
|
@ -10,6 +10,7 @@
|
|||
"lint": "prettier --plugin-search-dir . --check . && eslint .",
|
||||
"format": "prettier --plugin-search-dir . --write .",
|
||||
"make-data": "node ./scripts/make-data.js",
|
||||
"upload-data": "node ./scripts/upload-data.js",
|
||||
"ipfs-deploy": "npx -y --loglevel=error github:burningtree/ipfs-cluster-deploy ./build ethbrno-web",
|
||||
"postinstall": "npx playwright install --with-deps chromium"
|
||||
},
|
||||
|
@ -18,7 +19,9 @@
|
|||
"@sveltejs/adapter-auto": "next",
|
||||
"@sveltejs/adapter-static": "next",
|
||||
"@sveltejs/kit": "next",
|
||||
"arweave": "^1.11.6",
|
||||
"autoprefixer": "^10.4.12",
|
||||
"dotenv": "^16.0.3",
|
||||
"eslint": "^8.16.0",
|
||||
"eslint-config-prettier": "^8.3.0",
|
||||
"eslint-plugin-svelte3": "^4.0.0",
|
||||
|
|
|
@ -0,0 +1,58 @@
|
|||
import Arweave from "arweave";
|
||||
import dotenv from 'dotenv';
|
||||
import fs from 'fs';
|
||||
|
||||
dotenv.config();
|
||||
const arweave = Arweave.init({
|
||||
host: "arweave.net", // Hostname or IP address for a Arweave host
|
||||
port: 443, // Port
|
||||
protocol: "https", // Network protocol http or https
|
||||
timeout: 20000, // Network request timeouts in milliseconds
|
||||
logging: false, // Enable network request logging
|
||||
});
|
||||
|
||||
const datajson = fs.readFileSync('./static/data.json', 'utf8');
|
||||
const datamarkdown = fs.readFileSync('./static/hacker-manual.md', 'utf8');
|
||||
const key = JSON.parse(process.env.AR);
|
||||
|
||||
if (!datajson || !datamarkdown || !key) { console.error("datafiles or key missing"); }
|
||||
|
||||
|
||||
let uploadJSON = await arweave.createTransaction({
|
||||
data: datajson
|
||||
}, key);
|
||||
|
||||
uploadJSON.addTag('Content-Type', 'application/json');
|
||||
uploadJSON.addTag("AppName", "ETHBrno");
|
||||
uploadJSON.addTag("DataTag", "json");
|
||||
await arweave.transactions.sign(uploadJSON, key);
|
||||
//console.log(uploadJSON);
|
||||
|
||||
let uploader = await arweave.transactions.getUploader(uploadJSON);
|
||||
|
||||
while (!uploader.isComplete) {
|
||||
await uploader.uploadChunk();
|
||||
console.log(`${uploader.pctComplete}% complete, ${uploader.uploadedChunks}/${uploader.totalChunks}`);
|
||||
}
|
||||
|
||||
console.log("Data JSON uploaded in transaction:", uploadJSON.id);
|
||||
|
||||
let uploadMD = await arweave.createTransaction({
|
||||
data: datamarkdown
|
||||
}, key);
|
||||
|
||||
uploadMD.addTag('Content-Type', 'text/plain');
|
||||
uploadMD.addTag('charset', 'UTF-8');
|
||||
uploadMD.addTag("AppName", "ETHBrno");
|
||||
uploadMD.addTag("DataTag", "markdown");
|
||||
await arweave.transactions.sign(uploadMD, key);
|
||||
//console.log(uploadJSON);
|
||||
|
||||
let uploader2 = await arweave.transactions.getUploader(uploadMD);
|
||||
|
||||
while (!uploader2.isComplete) {
|
||||
await uploader2.uploadChunk();
|
||||
console.log(`${uploader2.pctComplete}% complete, ${uploader2.uploadedChunks}/${uploader2.totalChunks}`);
|
||||
}
|
||||
|
||||
console.log("Content markdown uploaded in transaction:", uploadMD.id);
|
|
@ -11,11 +11,32 @@
|
|||
}
|
||||
|
||||
// Fetches the latest published contributor data from Arweave: first asks the
// gateway's GraphQL endpoint for the newest ETHBrno "json" transaction, then
// downloads that transaction's payload and swaps it into `data.contributors`.
onMount(async () => {
  const docrequest = await fetch('https://arweave.net/graphql', {
    method: 'POST',
    headers: {
      'Accept-Encoding': 'gzip, deflate, br',
      'Content-Type': 'application/json',
      Accept: 'application/json',
      Connection: 'keep-alive',
      DNT: '1',
      Origin: 'https://arweave.net'
    },
    body: JSON.stringify({
      query:
        'query {\n transactions(\n tags: [\n { name: "AppName", values: "ETHBrno" }\n { name: "DataTag", values: "json" }\n ]\n first: 1\n ) {\n edges {\n node {\n id\n }\n }\n }\n}\n'
    })
  }).then((r) => r.json());
  const {
    data: {
      transactions: { edges }
    }
  } = docrequest;
  // BUG FIX: was `edges.length < 0`, which can never be true, so an empty
  // result set fell through to `edges[0]` and threw a TypeError.
  if (edges.length === 0) return;
  const response = await fetch(`https://arweave.net/${edges[0].node.id}`);
  const result = await response.json();
  data.contributors = result.contributors;
  console.log(data.contributors);
});
|
||||
|
||||
$: filteredContributors = data.contributors
|
||||
|
|
|
@ -302,6 +302,6 @@
|
|||
"url": "https://devfolio.co/"
|
||||
}
|
||||
],
|
||||
"time": "2022-10-21T08:48:23.091Z",
|
||||
"time": "2022-10-23T08:26:57.199Z",
|
||||
"version": "v2022-10-16"
|
||||
}
|
Načítá se…
Odkázat v novém úkolu