Skip to content

Commit

Permalink
fix: clean up dead commented-out code and move chunked-zkey setup helpers into util.ts
Browse files Browse the repository at this point in the history
  • Loading branch information
Meyanis95 committed Apr 10, 2024
1 parent cc7c5bb commit 9e58ca3
Show file tree
Hide file tree
Showing 5 changed files with 90 additions and 159 deletions.
16 changes: 0 additions & 16 deletions example/src/BenchmarkView.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -77,22 +77,6 @@ export default function BenchmarkView({}) {
}
}, [qrCodeValue]);

// useEffect(() => {
// let temp: string[] = [];
// if (ready) {
// for (let i = 0; i < 10; i++) {
// let chunkPath = `${zkeyChunksFolderPath}/circuit_final_${i}.zkey`;
// RNFS.exists(chunkPath).then((resp) => {
// console.log(chunkPath);
// console.log(resp);
// });
// temp.push(chunkPath);
// }
// console.log('Chunked paths: ', temp);
// setChunkPaths(temp);
// }
// }, [ready]);

if (!ready) {
const startSetup = Date.now();
setupProver().then(() => {
Expand Down
15 changes: 0 additions & 15 deletions src/aadhaarScanner.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -76,26 +76,11 @@ const styles = StyleSheet.create({
alignItems: 'center',
backgroundColor: 'rgba(0, 0, 0, 0.7)',
},
// camera: {
// height: '100%',
// width: '100%',
// alignSelf: 'center',
// },
cutout: {
height: 350, // The size of the QR code cutout
width: 350,
borderColor: '#000', // Border color from your design
},
// overlay: {
// position: 'absolute', // Overlay must be absolutely positioned
// top: 0, // Full overlay over the camera view
// left: 0,
// right: 0,
// bottom: 0,
// alignItems: 'center',
// justifyContent: 'center',
// backgroundColor: 'transparent', // Transparent background
// },
camera: {
height: '100%', // Take up full height
width: '100%', // Take up full width
Expand Down
7 changes: 7 additions & 0 deletions src/constants.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
/**
 * Remote prover artifacts, keyed by the local file name they are cached
 * under on the device. `as const` keeps both keys and URLs as readonly
 * literal types so consumers get exact string types instead of `string`.
 */
export const fileUrls = {
  'aadhaar-verifier.dat':
    'https://anon-aadhaar.s3.ap-south-1.amazonaws.com/v2.0.0/aadhaar-verifier.dat',
  'vkey.json': 'https://d1re67zv2jtrxt.cloudfront.net/v2.0.0/vkey.json',
  'circuit_final.zkey':
    'https://d1re67zv2jtrxt.cloudfront.net/v2.0.0/circuit_final.zkey',
} as const;
94 changes: 1 addition & 93 deletions src/groth16Prover.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
import { NativeModules, Platform } from 'react-native';
import RNFS from 'react-native-fs';
import fetchBlob from 'react-native-blob-util';
// import base64 from 'react-native-base64';
// import { TextDecoder } from 'text-encoding';
// import pako from 'pako';
import { fileUrls } from './constants';

const LINKING_ERROR =
`The package 'react-native-rapidsnark' doesn't seem to be linked. Make sure: \n\n` +
Expand All @@ -25,62 +23,6 @@ export type AnonAadhaarArgs = {
revealPinCode: string[];
};

// Remote prover artifacts keyed by the local file name they are cached under.
// NOTE(review): this is a duplicate of the map exported from src/constants.ts
// (imported above as `fileUrls`); this module-local copy is the one the
// commit removes in favour of the shared constant.
const fileUrls = {
  'aadhaar-verifier.dat':
    'https://anon-aadhaar.s3.ap-south-1.amazonaws.com/v2.0.0/aadhaar-verifier.dat',
  'vkey.json': 'https://d1re67zv2jtrxt.cloudfront.net/v2.0.0/vkey.json',
  'circuit_final.zkey':
    'https://d1re67zv2jtrxt.cloudfront.net/v2.0.0/circuit_final.zkey',
};

// On-device cache directory for the chunked zkey files.
const chunkedDirectoryPath = `${RNFS.DocumentDirectoryPath}/chunked`;

// Create the directory at `path` if it is not already present.
async function ensureDirectoryExists(path: string) {
  if (!(await RNFS.exists(path))) {
    await RNFS.mkdir(path);
  }
}

// async function downloadFileForChunkedZkey(url: string, targetPath: string) {
// // Determine the file extension
// const fileExtension = url.split('.').pop();
// const tempPath = targetPath + (fileExtension === 'gz' ? '.gz' : '');

// try {
// // Download the file to a temporary path
// await fetchBlob.config({ path: tempPath }).fetch('GET', url);
// console.log('The file is temporarily saved to ', tempPath);

// // If the file is a .gz file, read it, decompress it, and write the decompressed content
// if (fileExtension === 'gz') {
// // Read the .gz file as base64
// const base64Data = await RNFS.readFile(tempPath, 'base64');
// // Convert base64 to ArrayBuffer using react-native-base64
// const binaryData = base64.decode(base64Data);
// let bytes = new Uint8Array(binaryData.length);
// for (let i = 0; i < binaryData.length; i++) {
// bytes[i] = binaryData.charCodeAt(i);
// }
// // Decompress with pako
// const decompressed = pako.ungzip(bytes);
// // Convert the decompressed data back to a string to write it
// const decoder = new TextDecoder('utf-8');
// const decompressedStr = decoder.decode(decompressed);

// // Write the decompressed data to the target path
// await RNFS.writeFile(targetPath, decompressedStr, 'utf8');
// console.log('File decompressed to ', targetPath);

// // Optionally, remove the original .gz file after decompression
// await RNFS.unlink(tempPath);
// console.log('Original .gz file removed');
// }
// } catch (error) {
// console.error('Error during file download or decompression:', error);
// }
// }

async function downloadFile(url: string, targetPath: string) {
try {
// Download the file to a temporary path
Expand All @@ -91,40 +33,6 @@ async function downloadFile(url: string, targetPath: string) {
}
}

export async function setupProverWithChunkedZkey() {
console.log('Starting setup!');
const directoryPath = RNFS.DocumentDirectoryPath;

for (const [key, url] of Object.entries(fileUrls)) {
console.log('Round for key: ', key);
// If Zkey, loading zkey chunks
if (key === 'circuit_final') {
for (let i = 0; i < 10; i++) {
const filePath = `${chunkedDirectoryPath}/${key}_${i}.zkey`;
const fileExists = await RNFS.exists(filePath);

if (fileExists) continue;

console.log('Fetching => ', url + key + `_${i}.gz`);
console.log('Stored at => ', filePath);

await ensureDirectoryExists(chunkedDirectoryPath);

await downloadFile(url + key + `_${i}.gz`, filePath);
}
} else {
const filePath = `${directoryPath}/${key}`;
const fileExists = await RNFS.exists(filePath);

if (!fileExists) {
await downloadFile(url, filePath);
}

console.log(`${key} loaded at ${filePath}`);
}
}
}

export async function setupProver() {
console.log('Starting setup!');
for (const [key, url] of Object.entries(fileUrls)) {
Expand Down
117 changes: 82 additions & 35 deletions src/util.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
import pako from 'pako';
import RNFS from 'react-native-fs';
import base64 from 'react-native-base64';
import { TextDecoder } from 'text-encoding';
import { fileUrls } from './constants';
import fetchBlob from 'react-native-blob-util';

export function str2ab(str: string) {
const buf = new ArrayBuffer(str.length);
Expand Down Expand Up @@ -123,42 +128,84 @@ export function Uint8ArrayToCharArray(a: Uint8Array): string[] {
return Array.from(a).map((x) => x.toString());
}

export function utf8ArrayToStr(array: any) {
let out = '',
i = 0,
len = array.length;
let char2, char3;

while (i < len) {
let c = array[i++];
switch (c >> 4) {
case 0:
case 1:
case 2:
case 3:
case 4:
case 5:
case 6:
case 7:
// 0xxxxxxx
out += String.fromCharCode(c);
break;
case 12:
case 13:
// 110x xxxx 10xx xxxx
char2 = array[i++];
out += String.fromCharCode(((c & 0x1f) << 6) | (char2 & 0x3f));
break;
case 14:
// 1110 xxxx 10xx xxxx 10xx xxxx
char2 = array[i++];
char3 = array[i++];
out += String.fromCharCode(
((c & 0x0f) << 12) | ((char2 & 0x3f) << 6) | ((char3 & 0x3f) << 0)
);
break;
export async function setupProverWithChunkedZkey() {
console.log('Starting setup!');
const directoryPath = RNFS.DocumentDirectoryPath;

for (const [key, url] of Object.entries(fileUrls)) {
console.log('Round for key: ', key);
// If Zkey, loading zkey chunks
if (key === 'circuit_final') {
for (let i = 0; i < 10; i++) {
const filePath = `${chunkedDirectoryPath}/${key}_${i}.zkey`;
const fileExists = await RNFS.exists(filePath);

if (fileExists) continue;

console.log('Fetching => ', url + key + `_${i}.gz`);
console.log('Stored at => ', filePath);

await ensureDirectoryExists(chunkedDirectoryPath);

await downloadFileForChunkedZkey(url + key + `_${i}.gz`, filePath);
}
} else {
const filePath = `${directoryPath}/${key}`;
const fileExists = await RNFS.exists(filePath);

if (!fileExists) {
await downloadFileForChunkedZkey(url, filePath);
}

console.log(`${key} loaded at ${filePath}`);
}
}
}

// On-device cache directory for the chunked zkey files.
const chunkedDirectoryPath = `${RNFS.DocumentDirectoryPath}/chunked`;

return out;
// Make sure the directory at `path` exists, creating it on first use.
async function ensureDirectoryExists(path: string) {
  const alreadyThere = await RNFS.exists(path);
  if (alreadyThere) {
    return;
  }
  await RNFS.mkdir(path);
}

/**
 * Downloads a file and, when the source is gzip-compressed (`.gz`),
 * inflates it before persisting it at `targetPath`.
 *
 * @param url        Remote location of the (possibly gzipped) file.
 * @param targetPath Absolute path where the final file must end up.
 * @throws Re-throws any download/decompression error so callers know the
 *         artifact is unusable (the original swallowed failures silently).
 */
async function downloadFileForChunkedZkey(url: string, targetPath: string) {
  // A gzipped source is first stored side-by-side with a .gz suffix.
  const isGzipped = url.split('.').pop() === 'gz';
  const tempPath = targetPath + (isGzipped ? '.gz' : '');

  try {
    // Download the file to a temporary path.
    await fetchBlob.config({ path: tempPath }).fetch('GET', url);
    console.log('The file is temporarily saved to ', tempPath);

    if (isGzipped) {
      // Read the downloaded archive and turn it into raw bytes.
      const base64Data = await RNFS.readFile(tempPath, 'base64');
      const binaryData = base64.decode(base64Data);
      const bytes = new Uint8Array(binaryData.length);
      for (let i = 0; i < binaryData.length; i++) {
        bytes[i] = binaryData.charCodeAt(i);
      }

      // Decompress with pako.
      const decompressed = pako.ungzip(bytes);

      // NOTE(review): zkey chunks are binary data. The original decoded the
      // inflated bytes as UTF-8 and wrote them with the 'utf8' encoding,
      // which corrupts any byte sequence that is not valid UTF-8. Persist
      // as base64 with the 'base64' encoding instead — a lossless
      // byte-for-byte round trip.
      let binary = '';
      const CHUNK = 0x8000; // bound String.fromCharCode argument-list size
      for (let i = 0; i < decompressed.length; i += CHUNK) {
        binary += String.fromCharCode(...decompressed.subarray(i, i + CHUNK));
      }
      await RNFS.writeFile(targetPath, base64.encode(binary), 'base64');
      console.log('File decompressed to ', targetPath);

      // Remove the intermediate .gz archive after decompression.
      await RNFS.unlink(tempPath);
      console.log('Original .gz file removed');
    }
  } catch (error) {
    console.error('Error during file download or decompression:', error);
    // Surface the failure instead of swallowing it: setup must not proceed
    // as if the file were downloaded.
    throw error;
  }
}

0 comments on commit 9e58ca3

Please sign in to comment.