feat: http + json extended example #91

Merged · 21 commits · Sep 24, 2024
40 changes: 40 additions & 0 deletions .github/workflows/lint.yml
@@ -0,0 +1,40 @@
name: lint

on:
pull_request:
types: [opened, synchronize, reopened]
push:
branches: [main]

jobs:
fmt:
name: fmt
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4

- name: Install Rust
uses: dtolnay/rust-toolchain@master
with:
toolchain: nightly-2024-06-10
components: rustfmt

- name: cargo fmt
run: cargo fmt --all -- --check

clippy:
name: clippy
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4

- name: Install Rust
uses: dtolnay/rust-toolchain@master
with:
toolchain: nightly-2024-06-10
components: clippy

- name: cargo clippy
run: cargo clippy --all
16 changes: 15 additions & 1 deletion .github/workflows/test.yml
@@ -7,7 +7,7 @@ on:
branches: [ main ]

jobs:
test:
circom:
runs-on: ubuntu-latest

steps:
@@ -33,3 +33,17 @@ jobs:

- name: Run tests
run: npm run test
rust:
runs-on: ubuntu-latest

steps:
- name: Checkout code
uses: actions/checkout@v4

- name: Install Rust
uses: dtolnay/rust-toolchain@master
with:
toolchain: nightly-2024-06-10

- name: Run tests
run: cargo test --all
24 changes: 0 additions & 24 deletions README.md
@@ -129,30 +129,6 @@ This is our local Rust command line application.
Please see the [documentation](docs/pabuild.md) for how to use this alongside the other tools.


### Rust Example Witness JSON Creation
To generate example input JSON files for the Circom circuits, run:

```bash
cargo install --path .
```

to install the `witness` binary.

To get the basic idea, run `witness --help`. It can process and generate JSON files to be used for the circuits.
For example, if we have a given JSON file we want to parse such as `examples/json/test/example.json` for the `extract` circuit (see `circuits.json`), then we can:

```bash
witness json --input-file examples/json/test/example.json --output-dir inputs/extract --output-filename input.json
```

For an HTTP request/response, you can generate a JSON input via:
```bash
witness http --input-file examples/http/get_request.http --output-dir inputs/get_request --output-filename input.json
```

Afterwards, you can run `circomkit compile get_request` then `circomkit witness get_request input`.
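
As a rough illustration of what these commands produce: the circuits take the request as an array of ASCII byte values, so the generated `input.json` is essentially the raw `.http` text re-encoded as numbers. Below is a minimal sketch of that conversion, assuming a `data` signal zero-padded to a fixed `DATA_BYTES` length (both are assumptions; the `witness` binary's exact output format may differ).

```rust
use std::fs;

fn main() -> std::io::Result<()> {
    // DATA_BYTES is an assumed circuit parameter, not the repo's actual value.
    const DATA_BYTES: usize = 320;
    let request = fs::read_to_string("examples/http/get_request.http")?;

    let mut data = request.into_bytes();
    assert!(data.len() <= DATA_BYTES, "request longer than DATA_BYTES");
    data.resize(DATA_BYTES, 0); // zero-pad to the circuit's fixed size

    // Emit `{"data": [71, 69, 84, ...]}` without pulling in serde.
    let nums: Vec<String> = data.iter().map(|b| b.to_string()).collect();
    fs::create_dir_all("inputs/get_request")?;
    fs::write(
        "inputs/get_request/input.json",
        format!("{{\"data\": [{}]}}", nums.join(", ")),
    )?;
    Ok(())
}
```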


## License

Licensed under the Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
8 changes: 4 additions & 4 deletions circuits/http/extractor.circom
@@ -23,7 +23,7 @@ template ExtractResponse(DATA_BYTES, maxContentLength) {

// Initialize the parser
component State[DATA_BYTES];
State[0] = StateUpdate();
State[0] = HttpStateUpdate();
State[0].byte <== data[0];
State[0].parsing_start <== 1;
State[0].parsing_header <== 0;
@@ -36,7 +36,7 @@ template ExtractResponse(DATA_BYTES, maxContentLength) {
dataMask[0] <== 0;

for(var data_idx = 1; data_idx < DATA_BYTES; data_idx++) {
State[data_idx] = StateUpdate();
State[data_idx] = HttpStateUpdate();
State[data_idx].byte <== data[data_idx];
State[data_idx].parsing_start <== State[data_idx - 1].next_parsing_start;
State[data_idx].parsing_header <== State[data_idx - 1].next_parsing_header;
@@ -96,7 +96,7 @@ template ExtractHeaderValue(DATA_BYTES, headerNameLength, maxValueLength) {

// Initialize the parser
component State[DATA_BYTES];
State[0] = StateUpdate();
State[0] = HttpStateUpdate();
State[0].byte <== data[0];
State[0].parsing_start <== 1;
State[0].parsing_header <== 0;
@@ -115,7 +115,7 @@ template ExtractHeaderValue(DATA_BYTES, headerNameLength, maxValueLength) {
valueMask[0] <== 0;

for(var data_idx = 1; data_idx < DATA_BYTES; data_idx++) {
State[data_idx] = StateUpdate();
State[data_idx] = HttpStateUpdate();
State[data_idx].byte <== data[data_idx];
State[data_idx].parsing_start <== State[data_idx - 1].next_parsing_start;
State[data_idx].parsing_header <== State[data_idx - 1].next_parsing_header;
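
For reference, the masking in `ExtractResponse` above has a simple plain-language reading: a byte survives the mask only while the parser's `parsing_body` flag is set, i.e. only after the blank line that ends the headers. Here is a rough Rust model of that behavior; it mirrors the idea, not the circuit's constraints, and the zero-padding handling is an assumption.

```rust
/// Rough model of ExtractResponse's masking: bytes are kept only once the
/// parser has passed the double CRLF separating headers from body.
/// Mirrors the circuit's `dataMask[i] = data[i] * parsing_body` idea.
fn extract_body(data: &[u8], max_content_length: usize) -> Vec<u8> {
    let mut in_body = false;
    let mut body = Vec::new();
    let mut i = 0;
    while i < data.len() {
        if !in_body && data[i..].starts_with(b"\r\n\r\n") {
            in_body = true;
            i += 4; // skip the blank line itself
            continue;
        }
        if in_body && body.len() < max_content_length && data[i] != 0 {
            body.push(data[i]); // drop trailing zero padding, assumed here
        }
        i += 1;
    }
    body
}
```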
6 changes: 3 additions & 3 deletions circuits/http/interpreter.circom
@@ -18,7 +18,7 @@ template inStartLine() {
template inStartMiddle() {
signal input parsing_start;
signal output out;

out <== IsEqual()([parsing_start, 2]);
}

@@ -49,7 +49,7 @@ template HeaderFieldNameValueMatch(dataLen, nameLen, valueLen) {
signal input r;
signal input index;

component syntax = Syntax();
component syntax = HttpSyntax();

// signal output value[valueLen];

@@ -76,7 +76,7 @@ template HeaderFieldNameMatch(dataLen, nameLen) {
signal input r;
signal input index;

component syntax = Syntax();
component syntax = HttpSyntax();

// is name matches
signal headerNameMatch <== SubstringMatchWithIndex(dataLen, nameLen)(data, headerName, r, index);
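
`HeaderFieldNameValueMatch` asks whether the bytes at a candidate index spell `<name>: <value>`. Inside the circuit this is enforced with `SubstringMatchWithIndex` under a challenge value `r`; outside the circuit the same statement reduces to plain slice comparisons. A sketch of those plain semantics follows; the `name: value` layout with a single space after the colon is my assumption.

```rust
/// Plain-semantics model of HeaderFieldNameValueMatch: does `name`, a colon,
/// a space, and `value` appear in `data` starting at `index`?
/// The circuit enforces the same statement with randomized substring checks.
fn header_name_value_match(data: &[u8], name: &[u8], value: &[u8], index: usize) -> bool {
    let colon_space = b": "; // assumed separator layout
    let total = name.len() + colon_space.len() + value.len();
    if index + total > data.len() {
        return false;
    }
    data[index..index + name.len()] == *name
        && data[index + name.len()..index + name.len() + 2] == *colon_space
        && data[index + name.len() + 2..index + total] == *value
}
```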
20 changes: 10 additions & 10 deletions circuits/http/locker.circom
@@ -22,7 +22,7 @@ template LockStartLine(DATA_BYTES, beginningLen, middleLen, finalLen) {

// Initialize the parser
component State[DATA_BYTES];
State[0] = StateUpdate();
State[0] = HttpStateUpdate();
State[0].byte <== data[0];
State[0].parsing_start <== 1;
State[0].parsing_header <== 0;
@@ -31,7 +31,7 @@ template LockStartLine(DATA_BYTES, beginningLen, middleLen, finalLen) {
State[0].parsing_body <== 0;
State[0].line_status <== 0;

/*
/*
Note, because we know a beginning is the very first thing in a request
we can make this more efficient by just comparing the first `beginningLen` bytes
of the data ASCII against the beginning ASCII itself.
@@ -50,15 +50,15 @@ template LockStartLine(DATA_BYTES, beginningLen, middleLen, finalLen) {
var middle_end_counter = 1;
var final_end_counter = 1;
for(var data_idx = 1; data_idx < DATA_BYTES; data_idx++) {
State[data_idx] = StateUpdate();
State[data_idx] = HttpStateUpdate();
State[data_idx].byte <== data[data_idx];
State[data_idx].parsing_start <== State[data_idx - 1].next_parsing_start;
State[data_idx].parsing_header <== State[data_idx - 1].next_parsing_header;
State[data_idx].parsing_field_name <== State[data_idx-1].next_parsing_field_name;
State[data_idx].parsing_field_value <== State[data_idx-1].next_parsing_field_value;
State[data_idx].parsing_body <== State[data_idx - 1].next_parsing_body;
State[data_idx].line_status <== State[data_idx - 1].next_line_status;

// Check remaining beginning bytes
if(data_idx < beginningLen) {
beginningIsEqual[data_idx] <== IsEqual()([data[data_idx], beginning[data_idx]]);
@@ -70,7 +70,7 @@ template LockStartLine(DATA_BYTES, beginningLen, middleLen, finalLen) {
middleMask[data_idx] <== inStartMiddle()(State[data_idx].parsing_start);
finalMask[data_idx] <== inStartEnd()(State[data_idx].parsing_start);
middle_start_counter += startLineMask[data_idx] - middleMask[data_idx] - finalMask[data_idx];
// The end of middle is the start of the final
// The end of middle is the start of the final
middle_end_counter += startLineMask[data_idx] - finalMask[data_idx];
final_end_counter += startLineMask[data_idx];

@@ -86,7 +86,7 @@ template LockStartLine(DATA_BYTES, beginningLen, middleLen, finalLen) {
log("middle_end_counter = ", middle_end_counter);
log("final_end_counter = ", final_end_counter);
log("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx");
}
}

// Debugging
log("State[", DATA_BYTES, "].parsing_start ", "= ", State[DATA_BYTES-1].next_parsing_start);
@@ -105,7 +105,7 @@ template LockStartLine(DATA_BYTES, beginningLen, middleLen, finalLen) {
signal middleMatch <== SubstringMatchWithIndex(DATA_BYTES, middleLen)(data, middle, 100, middle_start_counter);
middleMatch === 1;
middleLen === middle_end_counter - middle_start_counter - 1;

// Check final is correct by substring match and length check
// TODO: change r
signal finalMatch <== SubstringMatchWithIndex(DATA_BYTES, finalLen)(data, final, 100, middle_end_counter);
@@ -128,7 +128,7 @@ template LockHeader(DATA_BYTES, headerNameLen, headerValueLen) {

// Initialize the parser
component State[DATA_BYTES];
State[0] = StateUpdate();
State[0] = HttpStateUpdate();
State[0].byte <== data[0];
State[0].parsing_start <== 1;
State[0].parsing_header <== 0;
@@ -144,7 +144,7 @@ template LockHeader(DATA_BYTES, headerNameLen, headerValueLen) {
var hasMatched = 0;

for(var data_idx = 1; data_idx < DATA_BYTES; data_idx++) {
State[data_idx] = StateUpdate();
State[data_idx] = HttpStateUpdate();
State[data_idx].byte <== data[data_idx];
State[data_idx].parsing_start <== State[data_idx - 1].next_parsing_start;
State[data_idx].parsing_header <== State[data_idx - 1].next_parsing_header;
@@ -158,7 +158,7 @@ template LockHeader(DATA_BYTES, headerNameLen, headerValueLen) {
headerFieldNameValueMatch[data_idx].data <== data;
headerFieldNameValueMatch[data_idx].headerName <== header;
headerFieldNameValueMatch[data_idx].headerValue <== value;
headerFieldNameValueMatch[data_idx].r <== 100;
headerFieldNameValueMatch[data_idx].r <== 100;
headerFieldNameValueMatch[data_idx].index <== data_idx;
isHeaderFieldNameValueMatch[data_idx] <== isHeaderFieldNameValueMatch[data_idx-1] + headerFieldNameValueMatch[data_idx].out;

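
`LockStartLine` pins down the three pieces of the start line (for example `HTTP/1.1`, `200`, `OK`) by using the `parsing_start` counters to record where the middle and final segments begin and end, then substring-matching each piece and checking its length. The following is a rough model of what that amounts to on plain bytes, with the challenge values and constraint details elided.

```rust
/// Rough model of LockStartLine: split the start line on spaces and compare
/// each segment. Returns true if the line reads `<beginning> <middle> <final>`
/// (roughly; the circuit additionally enforces segment lengths via counters).
fn lock_start_line(data: &[u8], beginning: &[u8], middle: &[u8], final_part: &[u8]) -> bool {
    // The start line ends at the first CRLF.
    let end = match data.windows(2).position(|w| w == b"\r\n") {
        Some(p) => p,
        None => return false,
    };
    let line = &data[..end];
    let mut parts = line.splitn(3, |&b| b == b' ');
    // The circuit checks the beginning directly against the first bytes,
    // and the middle/final via substring matches plus length checks.
    parts.next() == Some(beginning)
        && parts.next() == Some(middle)
        && parts.next() == Some(final_part)
}
```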
6 changes: 3 additions & 3 deletions circuits/http/parser/language.circom
@@ -2,7 +2,7 @@ pragma circom 2.1.9;

// All the possible request methods: https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods

template Syntax() {
template HttpSyntax() {
//-Delimiters---------------------------------------------------------------------------------//
// - ASCII char `:`
signal output COLON <== 58;
@@ -16,9 +16,9 @@ template Syntax() {
// https://www.rfc-editor.org/rfc/rfc2616#section-2.2
// https://www.rfc-editor.org/rfc/rfc7230#section-3.5
// - ASCII char `\r` (carriage return)
signal output CR <== 13;
signal output CR <== 13;
// - ASCII char `\n` (line feed)
signal output LF <== 10;
signal output LF <== 10;
// - ASCII char: ` `
signal output SPACE <== 32;
//-Escape-------------------------------------------------------------------------------------//
14 changes: 7 additions & 7 deletions circuits/http/parser/machine.circom
@@ -3,13 +3,13 @@ pragma circom 2.1.9;
include "language.circom";
include "../../utils/array.circom";

template StateUpdate() {
template HttpStateUpdate() {
signal input parsing_start; // flag that counts up to 3 for each value in the start line
signal input parsing_header; // Flag + Counter for what header line we are in
signal input parsing_field_name; // flag that tells if parsing header field name
signal input parsing_field_value; // flag that tells if parsing header field value
signal input parsing_body; // Flag when we are inside body
signal input line_status; // Flag that counts up to 4 to read a double CLRF
signal input line_status; // Flag that counts up to 4 to read a double CRLF
signal input byte;

signal output next_parsing_start;
@@ -19,20 +19,20 @@ template StateUpdate() {
signal output next_parsing_body;
signal output next_line_status;

component Syntax = Syntax();
component HttpSyntax = HttpSyntax();

//---------------------------------------------------------------------------------//
// check if we read space or colon
component readSP = IsEqual();
readSP.in <== [byte, Syntax.SPACE];
readSP.in <== [byte, HttpSyntax.SPACE];
component readColon = IsEqual();
readColon.in <== [byte, Syntax.COLON];
readColon.in <== [byte, HttpSyntax.COLON];

// Check if what we just read is a CR / LF
component readCR = IsEqual();
readCR.in <== [byte, Syntax.CR];
readCR.in <== [byte, HttpSyntax.CR];
component readLF = IsEqual();
readLF.in <== [byte, Syntax.LF];
readLF.in <== [byte, HttpSyntax.LF];

signal notCRAndLF <== (1 - readCR.out) * (1 - readLF.out);
//---------------------------------------------------------------------------------//
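
`HttpStateUpdate` is a per-byte transition function: the start-line counter, header counter, field-name/value flags, body flag, and CRLF counter are threaded from one byte to the next, exactly as the chained `State[data_idx]` components do in the templates above. Below is a simplified Rust model of those transitions, based on my reading of the flags rather than the circuit's exact arithmetic; the handling of blank lines versus new header lines is simplified.

```rust
/// Simplified model of HttpStateUpdate. Each field mirrors one input/output
/// signal of the template; the circuit expresses the same transitions as
/// field arithmetic over these flags.
#[derive(Clone, Copy, Default, Debug)]
struct HttpState {
    parsing_start: u8,  // counts 1..=3 across the three start-line segments
    parsing_header: u8, // which header line we are on (0 = none yet)
    parsing_field_name: bool,
    parsing_field_value: bool,
    parsing_body: bool,
    line_status: u8, // counts consecutive CR/LF bytes to spot the double CRLF
}

fn update(mut s: HttpState, byte: u8) -> HttpState {
    const CR: u8 = 13;
    const LF: u8 = 10;
    const SPACE: u8 = 32;
    const COLON: u8 = 58;

    // Track runs of CR/LF; any other byte resets the run.
    match byte {
        CR | LF => s.line_status += 1,
        _ => s.line_status = 0,
    }
    // Spaces separate the three start-line segments (e.g. "HTTP/1.1 200 OK").
    if s.parsing_start > 0 && byte == SPACE {
        s.parsing_start += 1;
    }
    if s.line_status == 2 && s.parsing_start > 0 {
        // First CRLF: the start line is over, the first header begins.
        s.parsing_start = 0;
        s.parsing_header = 1;
        s.parsing_field_name = true;
    } else if s.line_status == 2 && s.parsing_header > 0 {
        // CRLF inside the headers: move to the next header line
        // (simplified: the circuit can tell a blank line from a new header).
        s.parsing_header += 1;
        s.parsing_field_name = true;
        s.parsing_field_value = false;
    }
    // A colon inside a header switches from field name to field value.
    if s.parsing_header > 0 && byte == COLON && s.parsing_field_name {
        s.parsing_field_name = false;
        s.parsing_field_value = true;
    }
    // Double CRLF ends the headers and starts the body.
    if s.line_status == 4 {
        s.parsing_header = 0;
        s.parsing_field_name = false;
        s.parsing_field_value = false;
        s.parsing_body = true;
    }
    s
}

// Usage, mirroring Parser's chained State[] components:
//   let init = HttpState { parsing_start: 1, ..Default::default() };
//   let final_state = request.bytes().fold(init, |s, b| update(s, b));
```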
4 changes: 2 additions & 2 deletions circuits/http/parser/parser.circom
@@ -18,15 +18,15 @@ template Parser(DATA_BYTES) {

// Initialize the parser
component State[DATA_BYTES];
State[0] = StateUpdate();
State[0] = HttpStateUpdate();
State[0].byte <== data[0];
State[0].parsing_start <== 1;
State[0].parsing_header <== 0;
State[0].parsing_body <== 0;
State[0].line_status <== 0;

for(var data_idx = 1; data_idx < DATA_BYTES; data_idx++) {
State[data_idx] = StateUpdate();
State[data_idx] = HttpStateUpdate();
State[data_idx].byte <== data[data_idx];
State[data_idx].parsing_start <== State[data_idx - 1].next_parsing_start;
State[data_idx].parsing_header <== State[data_idx - 1].next_parsing_header;
4 changes: 2 additions & 2 deletions circuits/json/interpreter.circom
@@ -206,7 +206,7 @@ template NextKVPairAtDepth(n, depth) {
signal input currByte;
signal output out;

var logMaxDepth = log2Ceil(n);
var logMaxDepth = log2Ceil(n+1);

component topOfStack = GetTopOfStack(n);
topOfStack.stack <== stack;
@@ -219,7 +219,7 @@ template NextKVPairAtDepth(n, depth) {
component syntax = Syntax();
signal isComma <== IsEqual()([currByte, syntax.COMMA]);
// pointer <= depth
signal atLessDepth <== LessEqThan(logMaxDepth)([pointer, depth]);
signal atLessDepth <== LessEqThan(logMaxDepth)([pointer-1, depth]);
// current depth is less than key depth
signal isCommaAtDepthLessThanCurrent <== isComma * atLessDepth;

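
The `NextKVPairAtDepth` change swaps `log2Ceil(n)` for `log2Ceil(n+1)` and compares `pointer-1` rather than `pointer`. My reading: the stack pointer ranges over `0..=n`, so the comparator needs enough bits to hold `n` itself, and the depth of the current key is one less than the pointer, which sits one past the top of the stack. A small sketch of the bit-width arithmetic, under those assumptions:

```rust
/// ceil(log2(x)) with log2_ceil(1) = 0, i.e. the number of bits needed
/// to represent every value in 0..=x-1.
fn log2_ceil(x: usize) -> u32 {
    assert!(x > 0);
    usize::BITS - (x - 1).leading_zeros()
}

fn main() {
    let n = 16; // assumed MAX_STACK_HEIGHT-style parameter
    // log2_ceil(n) bits only cover 0..=2^log2_ceil(n)-1, which excludes n
    // itself whenever n is a power of two; a pointer that can reach n
    // (inclusive) therefore needs log2_ceil(n + 1) bits.
    assert_eq!(log2_ceil(n), 4);     // enough for 0..=15 only
    assert_eq!(log2_ceil(n + 1), 5); // enough for 0..=16
}
```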