From dab97cb1e535c9770ad8748c09ea7f1aee855c62 Mon Sep 17 00:00:00 2001
From: Graeme Coupar <grambo@grambo.me.uk>
Date: Sun, 7 Jan 2024 16:14:44 +0000
Subject: [PATCH] First pass of cynic-parser (#803)

#### Why are we making this change?

There are parts of the latest spec that `graphql-parser` doesn't
support, so I want to replace it with an alternative.

Other options out there that I'm aware of:

- `async-graphql-parser` would work, but its AST is quite annoying to
work with, and last time I tested it, it was slower than `graphql-parser`.
- `apollo-parser` keeps going when parsing fails, which is nice but means
there are a lot of `Option`s in its AST. I don't particularly want that
behaviour, so it's out.
- `apollo-compiler` would probably make `apollo-parser` workable, but
feels like too heavy a dependency for my needs.

So it looks like I might write my own. Also, it'll be fun.

#### What effects does this change have?

Adds the first pass of a parser for GraphQL type system documents.
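
For the curious, usage currently looks something like this (mirroring the
tests in `src/lib.rs`; the API is very much subject to change):

```rust
fn main() {
    // Parse some SDL and print it back out (`to_sdl` is behind the
    // default-on `sdl` feature).
    let ast = cynic_parser::parse_type_system_document("type MyType { field: String }");
    println!("{}", ast.to_sdl());
}
```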
---
 Cargo.lock                                | 338 ++++++++++++-
 Cargo.toml                                |   6 +-
 cynic-parser/Cargo.toml                   |  31 ++
 cynic-parser/benches/parsing-benchmark.rs |  25 +
 cynic-parser/build.rs                     |   3 +
 cynic-parser/src/ast.rs                   | 262 ++++++++++
 cynic-parser/src/ast/ids.rs               | 171 +++++++
 cynic-parser/src/ast/reader.rs            | 225 +++++++++
 cynic-parser/src/lexer/mod.rs             |  36 ++
 cynic-parser/src/lexer/tokens.rs          | 590 ++++++++++++++++++++++
 cynic-parser/src/lib.rs                   |  84 +++
 cynic-parser/src/printer.rs               | 195 +++++++
 cynic-parser/src/schema.lalrpop           | 202 ++++++++
 13 files changed, 2154 insertions(+), 14 deletions(-)
 create mode 100644 cynic-parser/Cargo.toml
 create mode 100644 cynic-parser/benches/parsing-benchmark.rs
 create mode 100644 cynic-parser/build.rs
 create mode 100644 cynic-parser/src/ast.rs
 create mode 100644 cynic-parser/src/ast/ids.rs
 create mode 100644 cynic-parser/src/ast/reader.rs
 create mode 100644 cynic-parser/src/lexer/mod.rs
 create mode 100644 cynic-parser/src/lexer/tokens.rs
 create mode 100644 cynic-parser/src/lib.rs
 create mode 100644 cynic-parser/src/printer.rs
 create mode 100644 cynic-parser/src/schema.lalrpop

diff --git a/Cargo.lock b/Cargo.lock
index 4a646408..79ba7bfa 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -207,6 +207,15 @@ version = "0.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "eab1c04a571841102f5345a8fc0f6bb3d31c315dec879b5c6e42e40ce7ffa34e"
 
+[[package]]
+name = "ascii-canvas"
+version = "3.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6"
+dependencies = [
+ "term",
+]
+
 [[package]]
 name = "assert_matches"
 version = "1.5.0"
@@ -435,6 +444,27 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "beef"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1"
+
+[[package]]
+name = "bit-set"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
+dependencies = [
+ "bit-vec",
+]
+
+[[package]]
+name = "bit-vec"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
+
 [[package]]
 name = "bitflags"
 version = "1.3.2"
@@ -949,6 +979,12 @@ dependencies = [
  "cfg-if",
 ]
 
+[[package]]
+name = "crunchy"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
+
 [[package]]
 name = "crypto-mac"
 version = "0.10.1"
@@ -1085,6 +1121,20 @@ dependencies = [
  "thiserror",
 ]
 
+[[package]]
+name = "cynic-parser"
+version = "0.1.0"
+dependencies = [
+ "criterion",
+ "graphql-parser",
+ "indexmap 2.1.0",
+ "insta",
+ "lalrpop",
+ "lalrpop-util",
+ "logos",
+ "pretty",
+]
+
 [[package]]
 name = "cynic-proc-macros"
 version = "3.3.1"
@@ -1162,7 +1212,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
 dependencies = [
  "cfg-if",
- "hashbrown 0.14.0",
+ "hashbrown 0.14.3",
  "lock_api",
  "once_cell",
  "parking_lot_core",
@@ -1182,6 +1232,12 @@ dependencies = [
  "tokio",
 ]
 
+[[package]]
+name = "diff"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
+
 [[package]]
 name = "digest"
 version = "0.9.0"
@@ -1191,6 +1247,27 @@ dependencies = [
  "generic-array",
 ]
 
+[[package]]
+name = "dirs-next"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1"
+dependencies = [
+ "cfg-if",
+ "dirs-sys-next",
+]
+
+[[package]]
+name = "dirs-sys-next"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
+dependencies = [
+ "libc",
+ "redox_users",
+ "winapi",
+]
+
 [[package]]
 name = "discard"
 version = "1.0.4"
@@ -1209,6 +1286,15 @@ version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
 
+[[package]]
+name = "ena"
+version = "0.14.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c533630cf40e9caa44bd91aadc88a75d75a4c3a12b4cfde353cbed41daa1e1f1"
+dependencies = [
+ "log",
+]
+
 [[package]]
 name = "enclose"
 version = "1.1.8"
@@ -1286,10 +1372,16 @@ checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0"
 dependencies = [
  "cfg-if",
  "libc",
- "redox_syscall",
+ "redox_syscall 0.3.5",
  "windows-sys 0.48.0",
 ]
 
+[[package]]
+name = "fixedbitset"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
+
 [[package]]
 name = "flate2"
 version = "1.0.27"
@@ -1637,9 +1729,9 @@ dependencies = [
 
 [[package]]
 name = "hashbrown"
-version = "0.14.0"
+version = "0.14.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
+checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
 
 [[package]]
 name = "heck"
@@ -1868,12 +1960,12 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "2.0.0"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d"
+checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
 dependencies = [
  "equivalent",
- "hashbrown 0.14.0",
+ "hashbrown 0.14.3",
 ]
 
 [[package]]
@@ -2004,6 +2096,38 @@ dependencies = [
  "log",
 ]
 
+[[package]]
+name = "lalrpop"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da4081d44f4611b66c6dd725e6de3169f9f63905421e8626fcb86b6a898998b8"
+dependencies = [
+ "ascii-canvas",
+ "bit-set",
+ "diff",
+ "ena",
+ "is-terminal",
+ "itertools 0.10.5",
+ "lalrpop-util",
+ "petgraph",
+ "pico-args",
+ "regex",
+ "regex-syntax 0.7.5",
+ "string_cache",
+ "term",
+ "tiny-keccak",
+ "unicode-xid",
+]
+
+[[package]]
+name = "lalrpop-util"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f35c735096c0293d313e8f2a641627472b83d01b937177fe76e5e2708d31e0d"
+dependencies = [
+ "regex",
+]
+
 [[package]]
 name = "lazy_static"
 version = "1.4.0"
@@ -2039,6 +2163,17 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "libredox"
+version = "0.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8"
+dependencies = [
+ "bitflags 2.4.0",
+ "libc",
+ "redox_syscall 0.4.1",
+]
+
 [[package]]
 name = "libz-sys"
 version = "1.1.12"
@@ -2088,6 +2223,38 @@ dependencies = [
  "value-bag",
 ]
 
+[[package]]
+name = "logos"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c000ca4d908ff18ac99b93a062cb8958d331c3220719c52e77cb19cc6ac5d2c1"
+dependencies = [
+ "logos-derive",
+]
+
+[[package]]
+name = "logos-codegen"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc487311295e0002e452025d6b580b77bb17286de87b57138f3b5db711cded68"
+dependencies = [
+ "beef",
+ "fnv",
+ "proc-macro2",
+ "quote",
+ "regex-syntax 0.6.29",
+ "syn 2.0.36",
+]
+
+[[package]]
+name = "logos-derive"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dbfc0d229f1f42d790440136d941afd806bc9e949e2bcb8faa813b0f00d1267e"
+dependencies = [
+ "logos-codegen",
+]
+
 [[package]]
 name = "maplit"
 version = "1.0.2"
@@ -2163,6 +2330,12 @@ dependencies = [
  "tempfile",
 ]
 
+[[package]]
+name = "new_debug_unreachable"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
+
 [[package]]
 name = "nom"
 version = "5.1.3"
@@ -2317,6 +2490,16 @@ version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e"
 
+[[package]]
+name = "parking_lot"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
+dependencies = [
+ "lock_api",
+ "parking_lot_core",
+]
+
 [[package]]
 name = "parking_lot_core"
 version = "0.9.8"
@@ -2325,7 +2508,7 @@ checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447"
 dependencies = [
  "cfg-if",
  "libc",
- "redox_syscall",
+ "redox_syscall 0.3.5",
  "smallvec",
  "windows-targets 0.48.5",
 ]
@@ -2336,6 +2519,31 @@ version = "2.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
 
+[[package]]
+name = "petgraph"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9"
+dependencies = [
+ "fixedbitset",
+ "indexmap 2.1.0",
+]
+
+[[package]]
+name = "phf_shared"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096"
+dependencies = [
+ "siphasher",
+]
+
+[[package]]
+name = "pico-args"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315"
+
 [[package]]
 name = "pin-project"
 version = "1.1.3"
@@ -2435,6 +2643,23 @@ version = "0.2.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
 
+[[package]]
+name = "precomputed-hash"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
+
+[[package]]
+name = "pretty"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b55c4d17d994b637e2f4daf6e5dc5d660d209d5642377d675d7a1c3ab69fa579"
+dependencies = [
+ "arrayvec 0.5.2",
+ "typed-arena",
+ "unicode-width",
+]
+
 [[package]]
 name = "proc-macro-crate"
 version = "0.1.5"
@@ -2633,6 +2858,26 @@ dependencies = [
  "bitflags 1.3.2",
 ]
 
+[[package]]
+name = "redox_syscall"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
+dependencies = [
+ "bitflags 1.3.2",
+]
+
+[[package]]
+name = "redox_users"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4"
+dependencies = [
+ "getrandom 0.2.10",
+ "libredox",
+ "thiserror",
+]
+
 [[package]]
 name = "ref-cast"
 version = "1.0.20"
@@ -2662,7 +2907,7 @@ dependencies = [
  "aho-corasick",
  "memchr",
  "regex-automata",
- "regex-syntax",
+ "regex-syntax 0.7.5",
 ]
 
 [[package]]
@@ -2673,9 +2918,15 @@ checksum = "c2f401f4955220693b56f8ec66ee9c78abffd8d1c4f23dc41a23839eb88f0795"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-syntax",
+ "regex-syntax 0.7.5",
 ]
 
+[[package]]
+name = "regex-syntax"
+version = "0.6.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
+
 [[package]]
 name = "regex-syntax"
 version = "0.7.5"
@@ -2918,6 +3169,12 @@ dependencies = [
  "untrusted",
 ]
 
+[[package]]
+name = "rustversion"
+version = "1.0.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
+
 [[package]]
 name = "ryu"
 version = "1.0.15"
@@ -3164,6 +3421,12 @@ dependencies = [
  "event-listener",
 ]
 
+[[package]]
+name = "siphasher"
+version = "0.3.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d"
+
 [[package]]
 name = "slab"
 version = "0.4.9"
@@ -3320,6 +3583,19 @@ version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "213701ba3370744dcd1a12960caa4843b3d68b4d1c0a5d575e0d65b2ee9d16c0"
 
+[[package]]
+name = "string_cache"
+version = "0.8.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b"
+dependencies = [
+ "new_debug_unreachable",
+ "once_cell",
+ "parking_lot",
+ "phf_shared",
+ "precomputed-hash",
+]
+
 [[package]]
 name = "strsim"
 version = "0.10.0"
@@ -3392,11 +3668,22 @@ checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef"
 dependencies = [
  "cfg-if",
  "fastrand 2.0.0",
- "redox_syscall",
+ "redox_syscall 0.3.5",
  "rustix 0.38.13",
  "windows-sys 0.48.0",
 ]
 
+[[package]]
+name = "term"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f"
+dependencies = [
+ "dirs-next",
+ "rustversion",
+ "winapi",
+]
+
 [[package]]
 name = "termcolor"
 version = "1.2.0"
@@ -3470,6 +3757,15 @@ dependencies = [
  "syn 1.0.109",
 ]
 
+[[package]]
+name = "tiny-keccak"
+version = "2.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237"
+dependencies = [
+ "crunchy",
+]
+
 [[package]]
 name = "tinytemplate"
 version = "1.2.1"
@@ -3571,7 +3867,7 @@ version = "0.19.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
 dependencies = [
- "indexmap 2.0.0",
+ "indexmap 2.1.0",
  "serde",
  "serde_spanned",
  "toml_datetime",
@@ -3664,6 +3960,12 @@ dependencies = [
  "toml_edit",
 ]
 
+[[package]]
+name = "typed-arena"
+version = "2.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
+
 [[package]]
 name = "typenum"
 version = "1.17.0"
@@ -3709,6 +4011,18 @@ dependencies = [
  "tinyvec",
 ]
 
+[[package]]
+name = "unicode-width"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
+
 [[package]]
 name = "universal-hash"
 version = "0.4.1"
diff --git a/Cargo.toml b/Cargo.toml
index a1ccf49f..4c6d39c6 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -10,7 +10,8 @@ members = [
     "examples",
     "schemas/github",
     "tests/querygen-compile-run",
-    "tests/ui-tests"
+    "tests/ui-tests",
+    "cynic-parser"
 ]
 resolver = "2"
 
@@ -20,7 +21,8 @@ default-members = [
     "cynic-codegen",
     "cynic-introspection",
     "cynic-proc-macros",
-    "cynic-querygen"
+    "cynic-querygen",
+    "cynic-parser"
 ]
 
 [workspace.package]
diff --git a/cynic-parser/Cargo.toml b/cynic-parser/Cargo.toml
new file mode 100644
index 00000000..54742e6d
--- /dev/null
+++ b/cynic-parser/Cargo.toml
@@ -0,0 +1,31 @@
+[package]
+name = "cynic-parser"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[features]
+# TODO: Disable this at some point
+default = ["sdl"]
+sdl = ["pretty"]
+
+[dependencies]
+indexmap = "2"
+lalrpop-util = "0.20.0"
+logos = "0.13"
+pretty = { version = "0.12", optional = true }
+
+[dev-dependencies]
+criterion = "0.4"
+graphql-parser = "0.4"
+insta = "1.29"
+
+[build-dependencies]
+lalrpop = "0.20.0"
+
+
+[[bench]]
+name = "parsing-benchmark"
+harness = false
\ No newline at end of file
diff --git a/cynic-parser/benches/parsing-benchmark.rs b/cynic-parser/benches/parsing-benchmark.rs
new file mode 100644
index 00000000..003d4d9e
--- /dev/null
+++ b/cynic-parser/benches/parsing-benchmark.rs
@@ -0,0 +1,25 @@
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+use cynic_parser::Ast;
+
+pub fn criterion_benchmark(c: &mut Criterion) {
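+    // Parse the same small object definition with both parsers for a rough comparison.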
+    let input = "type MyType { field: Whatever, field: Whatever }";
+    c.bench_function("cynic-parser parse object", |b| {
+        b.iter(|| {
+            let lexer = cynic_parser::Lexer::new(input);
+            let object = cynic_parser::ObjectDefinitionParser::new()
+                .parse(input, &mut Ast::new(), lexer)
+                .unwrap();
+            black_box(object)
+        })
+    });
+
+    c.bench_function("graphql_parser parse object", |b| {
+        b.iter(|| {
+            let parsed = graphql_parser::parse_schema::<String>(input).unwrap();
+            black_box(parsed)
+        })
+    });
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/cynic-parser/build.rs b/cynic-parser/build.rs
new file mode 100644
index 00000000..ca5c2836
--- /dev/null
+++ b/cynic-parser/build.rs
@@ -0,0 +1,3 @@
+fn main() {
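+    // Compile the .lalrpop grammar(s) under src/ into Rust at build time.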
+    lalrpop::process_root().unwrap();
+}
diff --git a/cynic-parser/src/ast.rs b/cynic-parser/src/ast.rs
new file mode 100644
index 00000000..6d7073e0
--- /dev/null
+++ b/cynic-parser/src/ast.rs
@@ -0,0 +1,262 @@
+use std::collections::HashMap;
+
+use ids::*;
+use indexmap::IndexSet;
+
+pub(crate) mod ids;
+mod reader;
+
+pub use reader::{AstReader, Definition, ValueReader};
+
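+/// The AST is stored as a set of flat arenas (one `Vec` per node kind) plus
+/// an interned string set; nodes refer to each other through the typed
+/// indices in `ids.rs` rather than through boxed pointers.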
+#[derive(Default)]
+pub struct Ast {
+    strings: IndexSet<Box<str>>,
+
+    nodes: Vec<Node>,
+
+    definition_nodes: Vec<NodeId>,
+
+    schema_definitions: Vec<SchemaDefinition>,
+    object_definitions: Vec<ObjectDefinition>,
+    input_object_definitions: Vec<InputObjectDefinition>,
+
+    field_definitions: Vec<FieldDefinition>,
+    input_value_definitions: Vec<InputValueDefinition>,
+
+    type_references: Vec<Type>,
+
+    string_literals: Vec<StringLiteral>,
+
+    values: Vec<Value>,
+    directives: Vec<Directive>,
+    arguments: Vec<Argument>,
+
+    definition_descriptions: HashMap<NodeId, NodeId>,
+}
+
+// TODO: NonZeroUsize these?
+pub struct Node {
+    contents: NodeContents,
+    // span: Span
+}
+
+pub enum NodeContents {
+    Ident(StringId),
+    SchemaDefinition(SchemaDefinitionId),
+    ObjectDefinition(ObjectDefinitionId),
+    FieldDefinition(FieldDefinitionId),
+    InputObjectDefinition(InputObjectDefinitionId),
+    InputValueDefinition(InputValueDefinitionId),
+    StringLiteral(StringLiteralId),
+}
+
+pub struct SchemaDefinition {
+    pub roots: Vec<RootOperationTypeDefinition>,
+}
+
+pub struct ObjectDefinition {
+    pub name: StringId,
+    pub fields: Vec<NodeId>,
+    pub directives: Vec<DirectiveId>,
+}
+
+pub struct FieldDefinition {
+    pub name: StringId,
+    pub ty: TypeId,
+    pub arguments: Vec<NodeId>,
+    pub description: Option<NodeId>,
+}
+
+pub struct InputObjectDefinition {
+    pub name: StringId,
+    pub fields: Vec<NodeId>,
+    pub directives: Vec<DirectiveId>,
+}
+
+pub struct InputValueDefinition {
+    pub name: StringId,
+    pub ty: TypeId,
+    pub description: Option<NodeId>,
+    pub default: Option<ValueId>,
+}
+
+pub struct RootOperationTypeDefinition {
+    pub operation_type: OperationType,
+    pub named_type: StringId,
+}
+
+pub struct Type {
+    pub name: StringId,
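+    // Wrapping types are stored innermost-first, e.g. `[Int]!` stores
+    // `[List, NonNull]`; see `AstReader::<TypeId>::to_string` in reader.rs.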
+    pub wrappers: Vec<WrappingType>,
+}
+
+pub enum WrappingType {
+    NonNull,
+    List,
+}
+
+#[derive(Clone, Copy, Debug)]
+pub enum OperationType {
+    Query,
+    Mutation,
+    Subscription,
+}
+
+impl std::fmt::Display for OperationType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            OperationType::Query => write!(f, "query"),
+            OperationType::Mutation => write!(f, "mutation"),
+            OperationType::Subscription => write!(f, "subscription"),
+        }
+    }
+}
+
+pub enum StringLiteral {
+    Normal(StringId),
+    Block(StringId),
+}
+
+pub struct Directive {
+    pub name: StringId,
+    pub arguments: Vec<ArgumentId>,
+}
+
+pub struct Argument {
+    pub name: StringId,
+    pub value: ValueId,
+}
+
+pub enum Value {
+    Variable(StringId),
+    Int(i32),
+    Float(f32),
+    String(StringId),
+    Boolean(bool),
+    Null,
+    Enum(StringId),
+    List(Vec<ValueId>),
+    Object(Vec<(StringId, ValueId)>),
+}
+
+// TODO: Don't forget the spans etc.
+// TODO: make this whole impl into a builder that wraps an Ast.
+// Then the default Reader stuff can just go on Ast - much more sensible...
+impl Ast {
+    pub fn new() -> Self {
+        Ast::default()
+    }
+
+    pub fn definitions(&mut self, ids: Vec<(Option<NodeId>, NodeId)>) {
+        for (description, definition) in ids {
+            if let Some(description) = description {
+                self.definition_descriptions.insert(definition, description);
+            }
+            self.definition_nodes.push(definition);
+        }
+    }
+
+    pub fn schema_definition(&mut self, definition: SchemaDefinition) -> NodeId {
+        let definition_id = SchemaDefinitionId(self.schema_definitions.len());
+        self.schema_definitions.push(definition);
+
+        let node_id = NodeId(self.nodes.len());
+        let contents = NodeContents::SchemaDefinition(definition_id);
+
+        self.nodes.push(Node { contents });
+
+        node_id
+    }
+
+    pub fn object_definition(&mut self, definition: ObjectDefinition) -> NodeId {
+        let definition_id = ObjectDefinitionId(self.object_definitions.len());
+        self.object_definitions.push(definition);
+
+        let node_id = NodeId(self.nodes.len());
+        let contents = NodeContents::ObjectDefinition(definition_id);
+
+        self.nodes.push(Node { contents });
+
+        node_id
+    }
+
+    pub fn field_definition(&mut self, definition: FieldDefinition) -> NodeId {
+        let definition_id = FieldDefinitionId(self.field_definitions.len());
+        self.field_definitions.push(definition);
+
+        let node_id = NodeId(self.nodes.len());
+        let contents = NodeContents::FieldDefinition(definition_id);
+
+        self.nodes.push(Node { contents });
+
+        node_id
+    }
+
+    pub fn input_object_definition(&mut self, definition: InputObjectDefinition) -> NodeId {
+        let definition_id = InputObjectDefinitionId(self.input_object_definitions.len());
+        self.input_object_definitions.push(definition);
+
+        let node_id = NodeId(self.nodes.len());
+        let contents = NodeContents::InputObjectDefinition(definition_id);
+
+        self.nodes.push(Node { contents });
+
+        node_id
+    }
+
+    pub fn input_value_definition(&mut self, definition: InputValueDefinition) -> NodeId {
+        let definition_id = InputValueDefinitionId(self.input_value_definitions.len());
+        self.input_value_definitions.push(definition);
+
+        let node_id = NodeId(self.nodes.len());
+        let contents = NodeContents::InputValueDefinition(definition_id);
+        self.nodes.push(Node { contents });
+
+        node_id
+    }
+
+    pub fn type_reference(&mut self, ty: Type) -> TypeId {
+        let ty_id = TypeId(self.type_references.len());
+        self.type_references.push(ty);
+        ty_id
+    }
+
+    pub fn directive(&mut self, directive: Directive) -> DirectiveId {
+        let id = DirectiveId(self.directives.len());
+        self.directives.push(directive);
+        id
+    }
+
+    pub fn argument(&mut self, argument: Argument) -> ArgumentId {
+        let id = ArgumentId(self.arguments.len());
+        self.arguments.push(argument);
+        id
+    }
+
+    pub fn value(&mut self, value: Value) -> ValueId {
+        let id = ValueId(self.values.len());
+        self.values.push(value);
+        id
+    }
+
+    pub fn string_literal(&mut self, literal: StringLiteral) -> NodeId {
+        let literal_id = StringLiteralId(self.string_literals.len());
+        self.string_literals.push(literal);
+
+        let node_id = NodeId(self.nodes.len());
+        let contents = NodeContents::StringLiteral(literal_id);
+        self.nodes.push(Node { contents });
+
+        node_id
+    }
+
+    pub fn ident(&mut self, ident: &str) -> StringId {
+        self.intern_string(ident)
+    }
+
+    // TODO: should this be pub? not sure...
+    pub fn intern_string(&mut self, string: &str) -> StringId {
+        let (id, _) = self.strings.insert_full(string.into());
+        StringId(id)
+    }
+}
diff --git a/cynic-parser/src/ast/ids.rs b/cynic-parser/src/ast/ids.rs
new file mode 100644
index 00000000..2a4f7309
--- /dev/null
+++ b/cynic-parser/src/ast/ids.rs
@@ -0,0 +1,171 @@
+use crate::Ast;
+
+use super::{
+    Argument, Directive, FieldDefinition, InputObjectDefinition, InputValueDefinition, Node,
+    ObjectDefinition, SchemaDefinition, Type, Value,
+};
+
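+/// Marker trait for the typed ID kinds that `Ast::read` accepts.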
+pub trait AstId {}
+
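+/// Maps an ID type to the arena on `Ast` that it indexes into.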
+pub(crate) trait AstLookup<Id> {
+    type Output: ?Sized;
+
+    fn lookup(&self, index: Id) -> &Self::Output;
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct NodeId(pub(super) usize);
+
+impl AstLookup<NodeId> for Ast {
+    type Output = Node;
+
+    fn lookup(&self, index: NodeId) -> &Self::Output {
+        &self.nodes[index.0]
+    }
+}
+
+impl AstId for NodeId {}
+
+#[derive(Clone, Copy)]
+pub struct StringId(pub(super) usize);
+
+impl AstLookup<StringId> for Ast {
+    type Output = str;
+
+    fn lookup(&self, index: StringId) -> &Self::Output {
+        self.strings
+            .get_index(index.0)
+            .expect("strings to be present")
+    }
+}
+
+#[derive(Clone, Copy)]
+pub struct SchemaDefinitionId(pub(super) usize);
+
+impl AstId for SchemaDefinitionId {}
+
+impl AstLookup<SchemaDefinitionId> for Ast {
+    type Output = SchemaDefinition;
+
+    fn lookup(&self, index: SchemaDefinitionId) -> &Self::Output {
+        self.schema_definitions
+            .get(index.0)
+            .expect("objects to be present")
+    }
+}
+
+#[derive(Clone, Copy)]
+pub struct ObjectDefinitionId(pub(super) usize);
+
+impl AstId for ObjectDefinitionId {}
+
+impl AstLookup<ObjectDefinitionId> for Ast {
+    type Output = ObjectDefinition;
+
+    fn lookup(&self, index: ObjectDefinitionId) -> &Self::Output {
+        self.object_definitions
+            .get(index.0)
+            .expect("objects to be present")
+    }
+}
+
+#[derive(Clone, Copy)]
+pub struct InputObjectDefinitionId(pub(super) usize);
+impl AstId for InputObjectDefinitionId {}
+
+impl AstLookup<InputObjectDefinitionId> for Ast {
+    type Output = InputObjectDefinition;
+
+    fn lookup(&self, index: InputObjectDefinitionId) -> &Self::Output {
+        self.input_object_definitions
+            .get(index.0)
+            .expect("objects to be present")
+    }
+}
+
+#[derive(Clone, Copy)]
+pub struct FieldDefinitionId(pub(super) usize);
+
+impl AstId for FieldDefinitionId {}
+
+impl AstLookup<FieldDefinitionId> for Ast {
+    type Output = FieldDefinition;
+
+    fn lookup(&self, index: FieldDefinitionId) -> &Self::Output {
+        self.field_definitions
+            .get(index.0)
+            .expect("objects to be present")
+    }
+}
+
+#[derive(Clone, Copy)]
+pub struct InputValueDefinitionId(pub(super) usize);
+
+impl AstId for InputValueDefinitionId {}
+
+impl AstLookup<InputValueDefinitionId> for Ast {
+    type Output = InputValueDefinition;
+
+    fn lookup(&self, index: InputValueDefinitionId) -> &Self::Output {
+        self.input_value_definitions
+            .get(index.0)
+            .expect("objects to be present")
+    }
+}
+
+#[derive(Clone, Copy)]
+pub struct TypeId(pub(super) usize);
+
+impl AstId for TypeId {}
+
+impl AstLookup<TypeId> for Ast {
+    type Output = Type;
+
+    fn lookup(&self, index: TypeId) -> &Self::Output {
+        self.type_references
+            .get(index.0)
+            .expect("types to be present")
+    }
+}
+
+#[derive(Clone, Copy)]
+pub struct DirectiveId(pub(super) usize);
+
+impl AstId for DirectiveId {}
+
+impl AstLookup<DirectiveId> for Ast {
+    type Output = Directive;
+
+    fn lookup(&self, index: DirectiveId) -> &Self::Output {
+        self.directives.get(index.0).expect("directives to be present")
+    }
+}
+
+#[derive(Clone, Copy)]
+pub struct ArgumentId(pub(super) usize);
+
+impl AstId for ArgumentId {}
+
+impl AstLookup<ArgumentId> for Ast {
+    type Output = Argument;
+
+    fn lookup(&self, index: ArgumentId) -> &Self::Output {
+        self.arguments.get(index.0).expect("arguments to be present")
+    }
+}
+
+#[derive(Clone, Copy)]
+pub struct ValueId(pub(super) usize);
+
+impl AstId for ValueId {}
+
+impl AstLookup<ValueId> for Ast {
+    type Output = Value;
+
+    fn lookup(&self, index: ValueId) -> &Self::Output {
+        self.values.get(index.0).expect("values to be present")
+    }
+}
+
+#[derive(Clone, Copy)]
+pub struct StringLiteralId(pub(super) usize);
diff --git a/cynic-parser/src/ast/reader.rs b/cynic-parser/src/ast/reader.rs
new file mode 100644
index 00000000..a6604dc5
--- /dev/null
+++ b/cynic-parser/src/ast/reader.rs
@@ -0,0 +1,225 @@
+use crate::Ast;
+
+use super::{
+    ids::{ArgumentId, AstId, AstLookup, DirectiveId, InputValueDefinitionId, TypeId, ValueId},
+    FieldDefinitionId, InputObjectDefinitionId, NodeContents, ObjectDefinitionId, OperationType,
+    SchemaDefinitionId, Type, WrappingType,
+};
+
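+/// A typed cursor into the AST: pairs an ID with the `Ast` that owns it, so
+/// traversal reads as method calls instead of manual arena lookups.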
+pub struct AstReader<'a, I> {
+    id: I,
+    ast: &'a Ast,
+}
+
+impl super::Ast {
+    pub fn reader(&self) -> AstReader<'_, ()> {
+        AstReader { id: (), ast: self }
+    }
+
+    pub fn read<Id>(&self, id: Id) -> AstReader<'_, Id>
+    where
+        Id: AstId,
+    {
+        AstReader { id, ast: self }
+    }
+}
+
+impl<'a> AstReader<'a, ()> {
+    pub fn definitions(&self) -> impl Iterator<Item = Definition<'a>> + 'a {
+        self.ast.definition_nodes.iter().map(|definition| {
+            match self.ast.nodes[definition.0].contents {
+                NodeContents::SchemaDefinition(id) => {
+                    Definition::Schema(AstReader { id, ast: self.ast })
+                }
+                NodeContents::ObjectDefinition(id) => {
+                    Definition::Object(AstReader { id, ast: self.ast })
+                }
+                NodeContents::InputObjectDefinition(id) => {
+                    Definition::InputObject(AstReader { id, ast: self.ast })
+                }
+                NodeContents::FieldDefinition(_)
+                | NodeContents::InputValueDefinition(_)
+                | NodeContents::StringLiteral(_) => unreachable!(),
+                NodeContents::Ident(_) => unreachable!(),
+            }
+        })
+    }
+}
+
+pub enum Definition<'a> {
+    Schema(AstReader<'a, SchemaDefinitionId>),
+    Object(AstReader<'a, ObjectDefinitionId>),
+    InputObject(AstReader<'a, InputObjectDefinitionId>),
+}
+
+impl<'a> AstReader<'a, SchemaDefinitionId> {
+    pub fn root_operations(&self) -> impl Iterator<Item = (OperationType, &'a str)> {
+        self.ast
+            .lookup(self.id)
+            .roots
+            .iter()
+            .map(|root| (root.operation_type, self.ast.lookup(root.named_type)))
+    }
+}
+
+impl<'a> AstReader<'a, ObjectDefinitionId> {
+    pub fn name(&self) -> &str {
+        self.ast.lookup(self.ast.lookup(self.id).name)
+    }
+
+    pub fn fields(&self) -> impl Iterator<Item = AstReader<'a, FieldDefinitionId>> + 'a {
+        self.ast
+            .lookup(self.id)
+            .fields
+            .iter()
+            .map(|node| match self.ast.lookup(*node).contents {
+                NodeContents::FieldDefinition(id) => self.ast.read(id),
+                _ => unreachable!(),
+            })
+    }
+
+    pub fn directives(&self) -> impl Iterator<Item = AstReader<'a, DirectiveId>> + 'a {
+        self.ast
+            .lookup(self.id)
+            .directives
+            .iter()
+            .map(|id| self.ast.read(*id))
+    }
+}
+
+impl<'a> AstReader<'a, InputObjectDefinitionId> {
+    pub fn name(&self) -> &str {
+        self.ast.lookup(self.ast.lookup(self.id).name)
+    }
+
+    pub fn fields(&self) -> impl Iterator<Item = AstReader<'a, InputValueDefinitionId>> + 'a {
+        self.ast
+            .lookup(self.id)
+            .fields
+            .iter()
+            .map(|node| match self.ast.lookup(*node).contents {
+                NodeContents::InputValueDefinition(id) => self.ast.read(id),
+                _ => unreachable!(),
+            })
+    }
+
+    pub fn directives(&self) -> impl Iterator<Item = AstReader<'a, DirectiveId>> + 'a {
+        self.ast
+            .lookup(self.id)
+            .directives
+            .iter()
+            .map(|id| self.ast.read(*id))
+    }
+}
+
+impl<'a> AstReader<'a, FieldDefinitionId> {
+    pub fn name(&self) -> &str {
+        self.ast.lookup(self.ast.lookup(self.id).name)
+    }
+
+    pub fn ty(&self) -> AstReader<'a, TypeId> {
+        self.ast.read(self.ast.lookup(self.id).ty)
+    }
+
+    pub fn arguments(&self) -> impl Iterator<Item = AstReader<'a, InputValueDefinitionId>> {
+        self.ast.lookup(self.id).arguments.iter().map(|node| {
+            match self.ast.lookup(*node).contents {
+                NodeContents::InputValueDefinition(id) => self.ast.read(id),
+                _ => unreachable!(),
+            }
+        })
+    }
+}
+
+impl<'a> AstReader<'a, InputValueDefinitionId> {
+    pub fn name(&self) -> &str {
+        self.ast.lookup(self.ast.lookup(self.id).name)
+    }
+
+    pub fn ty(&self) -> AstReader<'a, TypeId> {
+        self.ast.read(self.ast.lookup(self.id).ty)
+    }
+
+    pub fn default_value(&self) -> Option<AstReader<'a, ValueId>> {
+        self.ast.lookup(self.id).default.map(|id| self.ast.read(id))
+    }
+}
+
+impl<'a> AstReader<'a, DirectiveId> {
+    pub fn name(&self) -> &str {
+        self.ast.lookup(self.ast.lookup(self.id).name)
+    }
+
+    pub fn arguments(&self) -> impl Iterator<Item = AstReader<'a, ArgumentId>> {
+        self.ast
+            .lookup(self.id)
+            .arguments
+            .iter()
+            .map(|id| self.ast.read(*id))
+    }
+}
+
+impl<'a> AstReader<'a, ArgumentId> {
+    pub fn name(&self) -> &str {
+        self.ast.lookup(self.ast.lookup(self.id).name)
+    }
+
+    pub fn value(&self) -> AstReader<'a, ValueId> {
+        self.ast.read(self.ast.lookup(self.id).value)
+    }
+}
+
+impl<'a> AstReader<'a, ValueId> {
+    pub fn value(&self) -> ValueReader<'a> {
+        match self.ast.lookup(self.id) {
+            super::Value::Variable(id) => ValueReader::Variable(self.ast.lookup(*id)),
+            super::Value::Int(num) => ValueReader::Int(*num),
+            super::Value::Float(num) => ValueReader::Float(*num),
+            super::Value::String(id) => ValueReader::String(self.ast.lookup(*id)),
+            super::Value::Boolean(val) => ValueReader::Boolean(*val),
+            super::Value::Null => ValueReader::Null,
+            super::Value::Enum(id) => ValueReader::Enum(self.ast.lookup(*id)),
+            super::Value::List(ids) => {
+                ValueReader::List(ids.iter().map(|id| self.ast.read(*id)).collect())
+            }
+            super::Value::Object(pairs) => ValueReader::Object(
+                pairs
+                    .iter()
+                    .map(|(name, value)| (self.ast.lookup(*name), self.ast.read(*value)))
+                    .collect(),
+            ),
+        }
+    }
+}
+
+pub enum ValueReader<'a> {
+    Variable(&'a str),
+    Int(i32),
+    Float(f32),
+    String(&'a str),
+    Boolean(bool),
+    Null,
+    Enum(&'a str),
+    List(Vec<AstReader<'a, ValueId>>),
+    Object(Vec<(&'a str, AstReader<'a, ValueId>)>),
+}
+
+impl<'a> AstReader<'a, TypeId> {
+    pub fn to_string(&self) -> String {
+        let Type { name, wrappers } = self.ast.lookup(self.id);
+        let mut output = String::new();
+        for wrapping in wrappers.iter().rev() {
+            if let WrappingType::List = wrapping {
+                output.push('[');
+            }
+        }
+        output.push_str(self.ast.lookup(*name));
+        for wrapping in wrappers.iter() {
+            match wrapping {
+                WrappingType::NonNull => output.push('!'),
+                WrappingType::List => output.push(']'),
+            }
+        }
+        output
+    }
+}
diff --git a/cynic-parser/src/lexer/mod.rs b/cynic-parser/src/lexer/mod.rs
new file mode 100644
index 00000000..d7c16dc5
--- /dev/null
+++ b/cynic-parser/src/lexer/mod.rs
@@ -0,0 +1,36 @@
+mod tokens;
+
+use logos::{Logos, SpannedIter};
+pub use tokens::*;
+
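+/// The `(start, token, end)` triples that a LALRPOP external lexer must yield.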
+pub type Spanned<Tok, Loc, Error> = Result<(Loc, Tok, Loc), Error>;
+
+#[derive(Debug)]
+pub enum LexicalError {
+    InvalidToken,
+}
+
+pub struct Lexer<'input> {
+    // instead of an iterator over characters, we have a token iterator
+    token_stream: SpannedIter<'input, Token<'input>>,
+}
+
+impl<'input> Lexer<'input> {
+    pub fn new(input: &'input str) -> Self {
+        Self {
+            token_stream: Token::lexer(input).spanned(),
+        }
+    }
+}
+
+impl<'input> Iterator for Lexer<'input> {
+    type Item = Spanned<Token<'input>, usize, LexicalError>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        match self.token_stream.next() {
+            None => None,
+            Some((Ok(token), span)) => Some(Ok((span.start, token, span.end))),
+            Some((Err(_), _)) => Some(Err(LexicalError::InvalidToken)),
+        }
+    }
+}
diff --git a/cynic-parser/src/lexer/tokens.rs b/cynic-parser/src/lexer/tokens.rs
new file mode 100644
index 00000000..31602532
--- /dev/null
+++ b/cynic-parser/src/lexer/tokens.rs
@@ -0,0 +1,590 @@
+/*
+* The code in this file is taken from
+* https://github.com/facebook/relay/blob/main/compiler/crates/graphql-syntax/src/lexer.rs
+*
+* Licensed under the MIT license:
+*
+* Copyright (c) Meta Platforms, Inc. and affiliates.
+*
+* Permission is hereby granted, free of charge, to any person obtaining a copy
+* of this software and associated documentation files (the "Software"), to deal
+* in the Software without restriction, including without limitation the rights
+* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the Software is
+* furnished to do so, subject to the following conditions:
+*
+* The above copyright notice and this permission notice shall be included in all
+* copies or substantial portions of the Software.
+*
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+* SOFTWARE.
+*/
+
+use std::fmt;
+
+use logos::Lexer;
+use logos::Logos;
+
+#[derive(Default, Eq, PartialEq)]
+pub struct TokenExtras {
+    /// Token callbacks might store an error token kind in here before failing.
+    /// This is then picked up in the parser to turn the `Error` token into a
+    /// more specific variant.
+    pub error_token: Option<Token<'static>>,
+}
+
+/// Lexer for the GraphQL specification: http://spec.graphql.org/
+#[derive(Logos, Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
+#[logos(extras = TokenExtras, skip r"[ \t\r\n\f,\ufeff]+|#[^\n\r]*")]
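+// The skip regex drops whitespace, commas, the BOM and `#` comments, which
+// GraphQL treats as insignificant.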
+pub enum Token<'a> {
+    ErrorUnterminatedString,
+    ErrorUnsupportedStringCharacter,
+    ErrorUnterminatedBlockString,
+    Empty,
+
+    // Valid tokens
+    #[token("&")]
+    Ampersand,
+
+    #[token("@")]
+    At,
+
+    #[token("}")]
+    CloseBrace,
+
+    #[token("]")]
+    CloseBracket,
+
+    #[token(")")]
+    CloseParen,
+
+    #[token(":")]
+    Colon,
+
+    #[token("$")]
+    Dollar,
+
+    EndOfFile,
+
+    #[token("=")]
+    Equals,
+
+    #[token("!")]
+    Exclamation,
+
+    #[token("schema")]
+    Schema,
+
+    #[token("query")]
+    Query,
+
+    #[token("type")]
+    Type,
+
+    #[token("input")]
+    Input,
+
+    #[token("true")]
+    True,
+
+    #[token("false")]
+    False,
+
+    #[token("null")]
+    Null,
+
+    // IntegerPart:    -?(0|[1-9][0-9]*)
+    // FractionalPart: \\.[0-9]+
+    // ExponentPart:   [eE][+-]?[0-9]+
+    #[regex("-?(0|[1-9][0-9]*)(\\.[0-9]+[eE][+-]?[0-9]+|\\.[0-9]+|[eE][+-]?[0-9]+)", |lex| lex.slice())]
+    FloatLiteral(&'a str),
+
+    #[regex("[a-zA-Z_][a-zA-Z0-9_]*", |lex| lex.slice())]
+    Identifier(&'a str),
+
+    #[regex("-?(0|[1-9][0-9]*)", |lex| lex.slice())]
+    IntegerLiteral(&'a str),
+
+    #[regex("-?0[0-9]+(\\.[0-9]+[eE][+-]?[0-9]+|\\.[0-9]+|[eE][+-]?[0-9]+)?")]
+    ErrorNumberLiteralLeadingZero,
+
+    #[regex("-?(0|[1-9][0-9]*)(\\.[0-9]+[eE][+-]?[0-9]+|\\.[0-9]+|[eE][+-]?[0-9]+)?[.a-zA-Z_]")]
+    ErrorNumberLiteralTrailingInvalid,
+
+    #[regex("-?(\\.[0-9]+[eE][+-]?[0-9]+|\\.[0-9]+)")]
+    ErrorFloatLiteralMissingZero,
+
+    #[token("{")]
+    OpenBrace,
+
+    #[token("[")]
+    OpenBracket,
+
+    #[token("(")]
+    OpenParen,
+
+    #[token(".")]
+    Period,
+
+    #[token("..")]
+    PeriodPeriod,
+
+    #[token("|")]
+    Pipe,
+
+    #[token("...")]
+    Spread,
+
+    #[token("\"", lex_string)]
+    StringLiteral(&'a str),
+
+    #[token("\"\"\"", lex_block_string)]
+    BlockStringLiteral(&'a str),
+}
+
+#[derive(Logos, Debug)]
+pub enum StringToken {
+    #[regex(r#"\\["\\/bfnrt]"#)]
+    EscapedCharacter,
+
+    #[regex(r#"\\u[0-9A-Fa-f][0-9A-Fa-f][0-9A-Fa-f][0-9A-Fa-f]"#)]
+    EscapedUnicode,
+
+    #[token("\"")]
+    Quote,
+
+    #[regex(r#"\n|\r|\r\n"#)]
+    LineTerminator,
+
+    #[regex(r#"[\u0009\u0020\u0021\u0023-\u005B\u005D-\uFFFF]+"#)]
+    StringCharacters,
+}
+
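+// Called by logos when it sees an opening quote: scans the remainder with a
+// secondary string lexer, advances the main lexer over the string on success,
+// and records a more specific error token in `extras` before failing.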
+fn lex_string<'a>(lexer: &mut Lexer<'a, Token<'a>>) -> Option<&'a str> {
+    let remainder = lexer.remainder();
+    let mut string_lexer = StringToken::lexer(remainder);
+    while let Some(string_token) = string_lexer.next() {
+        match string_token {
+            Ok(StringToken::Quote) => {
+                lexer.bump(string_lexer.span().end);
+                return Some(lexer.slice());
+            }
+            Ok(StringToken::LineTerminator) => {
+                lexer.bump(string_lexer.span().start);
+                lexer.extras.error_token = Some(Token::ErrorUnterminatedString);
+                return None;
+            }
+            Ok(
+                StringToken::EscapedCharacter
+                | StringToken::EscapedUnicode
+                | StringToken::StringCharacters,
+            ) => {}
+            Err(_) => {
+                lexer.extras.error_token = Some(Token::ErrorUnsupportedStringCharacter);
+                return None;
+            }
+        }
+    }
+    lexer.extras.error_token = Some(Token::ErrorUnterminatedString);
+    None
+}
+
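+// Block strings use the same scheme: scan with a secondary lexer until an
+// unescaped `"""` terminator.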
+fn lex_block_string<'a>(lexer: &mut Lexer<'a, Token<'a>>) -> Option<&'a str> {
+    let remainder = lexer.remainder();
+    let mut string_lexer = BlockStringToken::lexer(remainder);
+    while let Some(string_token) = string_lexer.next() {
+        match string_token {
+            Ok(BlockStringToken::TripleQuote) => {
+                lexer.bump(string_lexer.span().end);
+                return Some(lexer.slice());
+            }
+            Ok(BlockStringToken::EscapedTripleQuote | BlockStringToken::Other) => {}
+            Err(_) => unreachable!(),
+        }
+    }
+    lexer.extras.error_token = Some(Token::ErrorUnterminatedBlockString);
+    None
+}
+
+#[derive(Logos, Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
+pub enum BlockStringToken {
+    #[token("\\\"\"\"")]
+    EscapedTripleQuote,
+
+    #[token("\"\"\"")]
+    TripleQuote,
+
+    #[regex(r#"[\u0009\u000A\u000D\u0020-\uFFFF]"#)]
+    Other,
+}
+
+impl fmt::Display for Token<'_> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let message = match self {
+            Token::Ampersand => "ampersand ('&')",
+            Token::At => "at ('@')",
+            Token::CloseBrace => "closing brace ('}')",
+            Token::CloseBracket => "closing bracket (']')",
+            Token::CloseParen => "closing paren (')')",
+            Token::Colon => "colon (':')",
+            Token::Dollar => "dollar ('$')",
+            Token::EndOfFile => "end of file",
+            Token::Equals => "equals ('=')",
+            Token::Exclamation => "exclamation mark ('!')",
+            Token::FloatLiteral(_) => "floating point value (e.g. '3.14')",
+            Token::Identifier(_) => "non-variable identifier (e.g. 'x' or 'Foo')",
+            Token::IntegerLiteral(_) => "integer value (e.g. '0' or '42')",
+            Token::OpenBrace => "open brace ('{')",
+            Token::OpenBracket => "open bracket ('[')",
+            Token::OpenParen => "open parenthesis ('(')",
+            Token::Period => "period ('.')",
+            Token::PeriodPeriod => "double period ('..')",
+            Token::Pipe => "pipe ('|')",
+            Token::Spread => "spread ('...')",
+            Token::BlockStringLiteral(_) => "block string (e.g. '\"\"\"hi\"\"\"')",
+            Token::ErrorFloatLiteralMissingZero => "unsupported number (int or float) literal",
+            Token::ErrorNumberLiteralLeadingZero => "unsupported number (int or float) literal",
+            Token::ErrorNumberLiteralTrailingInvalid => "unsupported number (int or float) literal",
+            Token::StringLiteral(_) => "string literal (e.g. '\"...\"')",
+            Token::ErrorUnterminatedString => "unterminated string",
+            Token::ErrorUnsupportedStringCharacter => "unsupported character in string",
+            Token::ErrorUnterminatedBlockString => "unterminated block string",
+            Token::Empty => "missing expected kind",
+            Token::Schema => "schema",
+            Token::Query => "query",
+            Token::Type => "type",
+            Token::Input => "input",
+            Token::True => "true",
+            Token::False => "false",
+            Token::Null => "null,",
+        };
+        f.write_str(message)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    fn assert_token(source: &str, kind: Token, length: usize) {
+        let mut lexer = Token::lexer(source);
+        assert_eq!(
+            lexer.next(),
+            Some(Ok(kind)),
+            "Testing the lexing of string '{}'",
+            source
+        );
+        assert_eq!(
+            lexer.span(),
+            0..length,
+            "Testing the lexing of string '{}'",
+            source
+        );
+    }
+
+    fn assert_error(source: &str, length: usize) {
+        let mut lexer = Token::lexer(source);
+        assert_eq!(
+            lexer.next(),
+            Some(Err(())),
+            "Testing lexing fails for string '{}'",
+            source
+        );
+        assert_eq!(
+            lexer.span(),
+            0..length,
+            "Testing the lexing of string '{}'",
+            source
+        );
+    }
+
+    #[test]
+    fn test_number_successes() {
+        assert_token("4", Token::IntegerLiteral("4"), 1);
+        assert_token("4.123", Token::FloatLiteral("4.123"), 5);
+        assert_token("-4", Token::IntegerLiteral("-4"), 2);
+        assert_token("9", Token::IntegerLiteral("9"), 1);
+        assert_token("0", Token::IntegerLiteral("0"), 1);
+        assert_token("-4.123", Token::FloatLiteral("-4.123"), 6);
+        assert_token("0.123", Token::FloatLiteral("0.123"), 5);
+        assert_token("123e4", Token::FloatLiteral("123e4"), 5);
+        assert_token("123E4", Token::FloatLiteral("123E4"), 5);
+        assert_token("123e-4", Token::FloatLiteral("123e-4"), 6);
+        assert_token("123e+4", Token::FloatLiteral("123e+4"), 6);
+        assert_token("-1.123e4", Token::FloatLiteral("-1.123e4"), 8);
+        assert_token("-1.123E4", Token::FloatLiteral("-1.123E4"), 8);
+        assert_token("-1.123e-4", Token::FloatLiteral("-1.123e-4"), 9);
+        assert_token("-1.123e+4", Token::FloatLiteral("-1.123e+4"), 9);
+        assert_token("-1.123e4567", Token::FloatLiteral("-1.123e4567"), 11);
+        assert_token("-0", Token::IntegerLiteral("-0"), 2);
+    }
+
+    #[test]
+    fn test_number_failures() {
+        assert_token("00", Token::ErrorNumberLiteralLeadingZero, 2);
+        assert_token("01", Token::ErrorNumberLiteralLeadingZero, 2);
+        assert_token("-01", Token::ErrorNumberLiteralLeadingZero, 3);
+        assert_error("+1", 1);
+        assert_token("01.23", Token::ErrorNumberLiteralLeadingZero, 5);
+        assert_token("1.", Token::ErrorNumberLiteralTrailingInvalid, 2);
+        assert_token("1e", Token::ErrorNumberLiteralTrailingInvalid, 2);
+        assert_token("1.e1", Token::ErrorNumberLiteralTrailingInvalid, 2);
+        assert_token("1.A", Token::ErrorNumberLiteralTrailingInvalid, 2);
+        assert_error("-A", 1);
+        assert_token("1.0e", Token::ErrorNumberLiteralTrailingInvalid, 4);
+        assert_token("1.0eA", Token::ErrorNumberLiteralTrailingInvalid, 4);
+        assert_token("1.2e3e", Token::ErrorNumberLiteralTrailingInvalid, 6);
+        assert_token("1.2e3.4", Token::ErrorNumberLiteralTrailingInvalid, 6);
+        assert_token("1.23.4", Token::ErrorNumberLiteralTrailingInvalid, 5);
+        assert_token(".123", Token::ErrorFloatLiteralMissingZero, 4);
+
+        // check that we don't consume trailing valid items
+        assert_token("1.23.{}", Token::ErrorNumberLiteralTrailingInvalid, 5);
+        assert_token("1.23. {}", Token::ErrorNumberLiteralTrailingInvalid, 5);
+        assert_token("1.23. []", Token::ErrorNumberLiteralTrailingInvalid, 5);
+        assert_token("1.23. foo", Token::ErrorNumberLiteralTrailingInvalid, 5);
+        assert_token("1.23. $foo", Token::ErrorNumberLiteralTrailingInvalid, 5);
+    }
+
+    #[test]
+    fn test_lexing() {
+        let input = "
+           query EmptyQuery($id: ID!) {
+             node(id: $id) {
+               id @skip(if: false)
+               ...E1
+             }
+           }
+         ";
+        let mut lexer = Token::lexer(input);
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Query)));
+        assert_eq!(lexer.slice(), "query");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Identifier("EmptyQuery"))));
+        assert_eq!(lexer.slice(), "EmptyQuery");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::OpenParen)));
+        assert_eq!(lexer.slice(), "(");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Dollar)));
+        assert_eq!(lexer.slice(), "$");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Identifier("id"))));
+        assert_eq!(lexer.slice(), "id");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
+        assert_eq!(lexer.slice(), ":");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Identifier("ID"))));
+        assert_eq!(lexer.slice(), "ID");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Exclamation)));
+        assert_eq!(lexer.slice(), "!");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::CloseParen)));
+        assert_eq!(lexer.slice(), ")");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::OpenBrace)));
+        assert_eq!(lexer.slice(), "{");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Identifier("node"))));
+        assert_eq!(lexer.slice(), "node");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::OpenParen)));
+        assert_eq!(lexer.slice(), "(");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Identifier("id"))));
+        assert_eq!(lexer.slice(), "id");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
+        assert_eq!(lexer.slice(), ":");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Dollar)));
+        assert_eq!(lexer.slice(), "$");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Identifier("id"))));
+        assert_eq!(lexer.slice(), "id");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::CloseParen)));
+        assert_eq!(lexer.slice(), ")");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::OpenBrace)));
+        assert_eq!(lexer.slice(), "{");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Identifier("id"))));
+        assert_eq!(lexer.slice(), "id");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::At)));
+        assert_eq!(lexer.slice(), "@");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Identifier("skip"))));
+        assert_eq!(lexer.slice(), "skip");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::OpenParen)));
+        assert_eq!(lexer.slice(), "(");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Identifier("if"))));
+        assert_eq!(lexer.slice(), "if");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
+        assert_eq!(lexer.slice(), ":");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::False)));
+        assert_eq!(lexer.slice(), "false");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::CloseParen)));
+        assert_eq!(lexer.slice(), ")");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Spread)));
+        assert_eq!(lexer.slice(), "...");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Identifier("E1"))));
+        assert_eq!(lexer.slice(), "E1");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::CloseBrace)));
+        assert_eq!(lexer.slice(), "}");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::CloseBrace)));
+        assert_eq!(lexer.slice(), "}");
+
+        assert_eq!(lexer.next(), None);
+    }
+
+    #[test]
+    fn test_string_lexing() {
+        let input = r#"
+             "test"
+             "escaped \" quote"
+             "unterminated
+             "
+         "#;
+        let mut lexer = Token::lexer(input);
+
+        assert_eq!(lexer.next(), Some(Ok(Token::StringLiteral("\"test\""))));
+        assert_eq!(lexer.slice(), "\"test\"");
+
+        assert_eq!(
+            lexer.next(),
+            Some(Ok(Token::StringLiteral(r#""escaped \" quote""#)))
+        );
+        assert_eq!(lexer.slice(), r#""escaped \" quote""#);
+
+        assert_eq!(lexer.next(), Some(Err(())));
+        assert_eq!(
+            lexer.extras.error_token,
+            Some(Token::ErrorUnterminatedString)
+        );
+        assert_eq!(lexer.slice(), "\"unterminated");
+    }
+
+    #[test]
+    fn test_invalid_character_lexing() {
+        let input = r#"
+             {
+                 %%%
+                 __typename
+                 *
+             }
+         "#;
+        let mut lexer = Token::lexer(input);
+
+        assert_eq!(lexer.next(), Some(Ok(Token::OpenBrace)));
+        assert_eq!(lexer.slice(), "{");
+
+        assert_eq!(lexer.next(), Some(Err(())));
+        assert_eq!(lexer.slice(), "%");
+
+        assert_eq!(lexer.next(), Some(Err(())));
+        assert_eq!(lexer.slice(), "%");
+
+        assert_eq!(lexer.next(), Some(Err(())));
+        assert_eq!(lexer.slice(), "%");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::Identifier("__typename"))));
+        assert_eq!(lexer.slice(), "__typename");
+
+        assert_eq!(lexer.next(), Some(Err(())));
+        assert_eq!(lexer.slice(), "*");
+
+        assert_eq!(lexer.next(), Some(Ok(Token::CloseBrace)));
+        assert_eq!(lexer.slice(), "}");
+
+        assert_eq!(lexer.next(), None);
+    }
+
+    #[test]
+    fn test_block_string_lexing() {
+        let input = r#"
+             # escaped
+             """tes\"""t"""
+             # empty
+             """"""
+             # 2 quotes in a string
+             """"" """
+             """
+                 multi-
+                 line
+             """
+             """unterminated
+         "#;
+        let mut lexer = Token::lexer(input);
+
+        assert_eq!(
+            lexer.next(),
+            Some(Ok(Token::BlockStringLiteral(r#""""tes\"""t""""#)))
+        );
+        assert_eq!(lexer.slice(), r#""""tes\"""t""""#);
+
+        assert_eq!(
+            lexer.next(),
+            Some(Ok(Token::BlockStringLiteral(r#""""""""#)))
+        );
+        assert_eq!(lexer.slice(), r#""""""""#);
+
+        assert_eq!(
+            lexer.next(),
+            Some(Ok(Token::BlockStringLiteral(r#"""""" """"#)))
+        );
+        assert_eq!(lexer.slice(), r#"""""" """"#);
+
+        assert_eq!(
+            lexer.next(),
+            Some(Ok(Token::BlockStringLiteral(
+                r#""""
+                 multi-
+                 line
+             """"#
+            )))
+        );
+        assert_eq!(
+            lexer.slice(),
+            r#""""
+                 multi-
+                 line
+             """"#
+        );
+
+        assert_eq!(lexer.next(), Some(Err(())));
+        assert_eq!(
+            lexer.extras.error_token,
+            Some(Token::ErrorUnterminatedBlockString)
+        );
+        // Unterminated string just consumes the starting quotes
+        assert_eq!(lexer.slice(), r#"""""#);
+    }
+
+    #[test]
+    fn test_bom_lexing() {
+        let input = "\u{feff}";
+
+        let mut lexer = Token::lexer(input);
+
+        assert_eq!(lexer.next(), None);
+    }
+}
diff --git a/cynic-parser/src/lib.rs b/cynic-parser/src/lib.rs
new file mode 100644
index 00000000..e0ae27de
--- /dev/null
+++ b/cynic-parser/src/lib.rs
@@ -0,0 +1,84 @@
+use lalrpop_util::lalrpop_mod;
+
+mod ast;
+mod lexer;
+mod printer;
+
+pub use lexer::Lexer;
+pub use schema::ObjectDefinitionParser;
+
+// TODO: Make this more sensible
+pub use ast::Ast;
+
+lalrpop_mod!(pub schema);
+
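+/// Parses a GraphQL type system document into an [`Ast`].
+///
+/// A minimal usage sketch (`ignore`d for now, since error handling is still a
+/// TODO and parse failures currently panic):
+///
+/// ```ignore
+/// let ast = cynic_parser::parse_type_system_document("type Query { hello: String }");
+/// println!("{}", ast.to_sdl());
+/// ```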
+pub fn parse_type_system_document(input: &str) -> Ast {
+    let lexer = lexer::Lexer::new(input);
+    let mut ast = Ast::new();
+
+    schema::TypeSystemDocumentParser::new()
+        .parse(input, &mut ast, lexer)
+        .expect("TODO: error handling");
+
+    ast
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn it_works() {
+        insta::assert_snapshot!(
+            parse_type_system_document("schema { query:Query }").to_sdl(),
+            @r###"
+        schema {
+          query: Query
+        }
+        "###
+        );
+    }
+
+    #[test]
+    fn test_basic_object() {
+        insta::assert_snapshot!(
+            parse_type_system_document("type MyType @hello { field: Whatever, other: [[Int!]]! }").to_sdl(),
+            @r###"
+        type MyType @hello {
+          field: Whatever
+          other: [[Int!]]!
+        }
+        "###
+        );
+    }
+
+    #[test]
+    fn test_schema_field() {
+        // Use a keyword as a field name and make sure it's fine
+        insta::assert_snapshot!(
+            parse_type_system_document("type MyType { query: String }").to_sdl(),
+            @r###"
+        type MyType {
+          query: String
+        }
+        "###
+        );
+    }
+
+    #[test]
+    fn test_input() {
+        insta::assert_snapshot!(
+            parse_type_system_document(
+                r#"
+                "I am a description"
+                input MyType @hello { query: String = "Hello" }
+                "#
+            ).to_sdl(),
+            @r###"
+        input MyType @hello {
+          query: String = "Hello"
+        }
+        "###
+        );
+    }
+}
diff --git a/cynic-parser/src/printer.rs b/cynic-parser/src/printer.rs
new file mode 100644
index 00000000..25ed68e0
--- /dev/null
+++ b/cynic-parser/src/printer.rs
@@ -0,0 +1,195 @@
+use pretty::{BoxAllocator, DocAllocator, Pretty};
+
+use crate::ast::{
+    ids::{
+        ArgumentId, DirectiveId, FieldDefinitionId, InputObjectDefinitionId,
+        InputValueDefinitionId, ObjectDefinitionId, SchemaDefinitionId, TypeId, ValueId,
+    },
+    AstReader, Definition,
+};
+
+impl crate::Ast {
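+    /// Renders the document back out as SDL, pretty-printed at a width of 80
+    /// columns.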
+    pub fn to_sdl(&self) -> String {
+        let allocator = BoxAllocator;
+
+        let builder = allocator
+            .concat(
+                self.reader()
+                    .definitions()
+                    .map(|definition| match definition {
+                        Definition::Schema(schema) => NodeDisplay(schema).pretty(&allocator),
+                        Definition::Object(object) => NodeDisplay(object).pretty(&allocator),
+                        Definition::InputObject(object) => NodeDisplay(object).pretty(&allocator),
+                    }),
+            )
+            .pretty(&allocator);
+
+        #[allow(clippy::needless_borrow)] // This doesn't work without the borrow :|
+        {
+            format!("{}", (&*builder).pretty(80))
+        }
+    }
+}
+
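+/// Small wrapper so we can implement the (foreign) `Pretty` trait once per id
+/// type that an `AstReader` can point at.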
+pub struct NodeDisplay<'a, T>(AstReader<'a, T>);
+
+impl<'a> Pretty<'a, BoxAllocator> for NodeDisplay<'a, SchemaDefinitionId> {
+    fn pretty(self, allocator: &'a BoxAllocator) -> pretty::DocBuilder<'a, BoxAllocator, ()> {
+        let mut builder = allocator.text("schema");
+        let roots = self.0.root_operations().collect::<Vec<_>>();
+
+        if !roots.is_empty() {
+            builder = builder
+                .append(allocator.space())
+                .append(allocator.text("{"))
+                .append(allocator.hardline())
+                .append("  ")
+                .append(
+                    allocator
+                        .intersperse(
+                            roots.into_iter().map(|(kind, name)| {
+                                allocator.text(kind.to_string()).append(": ").append(name)
+                            }),
+                            allocator.hardline(),
+                        )
+                        .align(),
+                )
+                .append(allocator.hardline())
+                .append(allocator.text("}"))
+        }
+
+        builder
+    }
+}
+
+impl<'a> Pretty<'a, BoxAllocator> for NodeDisplay<'a, ObjectDefinitionId> {
+    fn pretty(self, allocator: &'a BoxAllocator) -> pretty::DocBuilder<'a, BoxAllocator, ()> {
+        let mut builder = allocator.text(format!("type {}", self.0.name()));
+
+        // Skip the directive block entirely when there are none, so we don't
+        // print a double space between the name and the "{".
+        let directives = self.0.directives().map(NodeDisplay).collect::<Vec<_>>();
+        if !directives.is_empty() {
+            builder = builder
+                .append(allocator.space())
+                .append(allocator.intersperse(directives, allocator.line()));
+        }
+
+        builder
+            .append(allocator.space())
+            .append(allocator.text("{"))
+            .append(allocator.hardline())
+            .append(allocator.text("  "))
+            .append(
+                allocator
+                    .intersperse(self.0.fields().map(NodeDisplay), allocator.hardline())
+                    .align(),
+            )
+            .append(allocator.hardline())
+            .append(allocator.text("}"))
+    }
+}
+
+impl<'a> Pretty<'a, BoxAllocator> for NodeDisplay<'a, FieldDefinitionId> {
+    fn pretty(self, allocator: &'a BoxAllocator) -> pretty::DocBuilder<'a, BoxAllocator, ()> {
+        allocator
+            .text(self.0.name().to_string())
+            .append(allocator.text(":"))
+            .append(allocator.space())
+            .append(NodeDisplay(self.0.ty()))
+    }
+}
+
+impl<'a> Pretty<'a, BoxAllocator> for NodeDisplay<'a, InputObjectDefinitionId> {
+    fn pretty(self, allocator: &'a BoxAllocator) -> pretty::DocBuilder<'a, BoxAllocator, ()> {
+        let mut builder = allocator.text(format!("input {}", self.0.name()));
+
+        // Same as objects: only print the directive block when it's non-empty.
+        let directives = self.0.directives().map(NodeDisplay).collect::<Vec<_>>();
+        if !directives.is_empty() {
+            builder = builder
+                .append(allocator.space())
+                .append(allocator.intersperse(directives, allocator.line()));
+        }
+
+        builder
+            .append(allocator.space())
+            .append(allocator.text("{"))
+            .append(allocator.hardline())
+            .append(allocator.text("  "))
+            .append(
+                allocator
+                    .intersperse(self.0.fields().map(NodeDisplay), allocator.hardline())
+                    .align(),
+            )
+            .append(allocator.hardline())
+            .append(allocator.text("}"))
+    }
+}
+
+impl<'a> Pretty<'a, BoxAllocator> for NodeDisplay<'a, InputValueDefinitionId> {
+    fn pretty(self, allocator: &'a BoxAllocator) -> pretty::DocBuilder<'a, BoxAllocator, ()> {
+        let mut builder = allocator
+            .text(self.0.name().to_string())
+            .append(allocator.text(":"))
+            .append(allocator.space())
+            .append(NodeDisplay(self.0.ty()));
+
+        if let Some(value) = self.0.default_value() {
+            builder = builder
+                .append(allocator.space())
+                .append(allocator.text("="))
+                .append(allocator.space())
+                .append(NodeDisplay(value));
+        }
+
+        builder
+    }
+}
+
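+// Rendering a type delegates to the reader's `Display` impl, which re-applies
+// the list/non-null wrappers (e.g. `[[Int!]]!` in the snapshot tests).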
+impl<'a> Pretty<'a, BoxAllocator> for NodeDisplay<'a, TypeId> {
+    fn pretty(self, allocator: &'a BoxAllocator) -> pretty::DocBuilder<'a, BoxAllocator, ()> {
+        allocator.text(self.0.to_string())
+    }
+}
+
+impl<'a> Pretty<'a, BoxAllocator> for NodeDisplay<'a, DirectiveId> {
+    fn pretty(self, allocator: &'a BoxAllocator) -> pretty::DocBuilder<'a, BoxAllocator, ()> {
+        let mut builder = allocator.text(format!("@{}", self.0.name()));
+
+        let arguments = self.0.arguments().collect::<Vec<_>>();
+        if !arguments.is_empty() {
+            builder = builder.append(
+                allocator
+                    .intersperse(arguments.into_iter().map(NodeDisplay), ", ")
+                    .parens(),
+            );
+        }
+        builder
+    }
+}
+
+impl<'a> Pretty<'a, BoxAllocator> for NodeDisplay<'a, ArgumentId> {
+    fn pretty(self, allocator: &'a BoxAllocator) -> pretty::DocBuilder<'a, BoxAllocator, ()> {
+        allocator
+            .text(self.0.name().to_string())
+            .append(allocator.text(":"))
+            .append(allocator.space())
+            .append(NodeDisplay(self.0.value()))
+    }
+}
+
+impl<'a> Pretty<'a, BoxAllocator> for NodeDisplay<'a, ValueId> {
+    fn pretty(self, allocator: &'a BoxAllocator) -> pretty::DocBuilder<'a, BoxAllocator, ()> {
+        match self.0.value() {
+            crate::ast::ValueReader::Variable(name) => allocator.text(format!("${name}")),
+            crate::ast::ValueReader::Int(value) => allocator.text(format!("{value}")),
+            crate::ast::ValueReader::Float(value) => allocator.text(format!("{value}")),
+            crate::ast::ValueReader::String(value) => allocator.text(value.to_string()),
+            crate::ast::ValueReader::Boolean(value) => allocator.text(format!("{value}")),
+            crate::ast::ValueReader::Null => allocator.text("null"),
+            crate::ast::ValueReader::Enum(value) => allocator.text(value.to_string()),
+            crate::ast::ValueReader::List(items) => allocator
+                .intersperse(items.into_iter().map(NodeDisplay), ",")
+                .brackets(),
+            crate::ast::ValueReader::Object(items) => allocator
+                .intersperse(
+                    items.into_iter().map(|(name, value)| {
+                        allocator
+                            .text(name)
+                            .append(allocator.text(":"))
+                            .append(NodeDisplay(value))
+                    }),
+                    ",",
+                )
+                .braces(),
+        }
+    }
+}
diff --git a/cynic-parser/src/schema.lalrpop b/cynic-parser/src/schema.lalrpop
new file mode 100644
index 00000000..f48e1f79
--- /dev/null
+++ b/cynic-parser/src/schema.lalrpop
@@ -0,0 +1,202 @@
+use std::str::FromStr;
+
+use crate::lexer;
+
+use crate::ast::{*, ids::*};
+
+grammar<'input>(input: &'input str, ast: &mut Ast);
+
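+// Every production gets access to the raw `input` and the mutable `Ast`
+// builder; rules allocate nodes into `ast` and pass ids around rather than
+// building up owned values.
+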
+pub TypeSystemDocument: () = {
+    <defs:DefinitionAndDescription+> => ast.definitions(defs),
+}
+
+pub DefinitionAndDescription: (Option<NodeId>, NodeId) = {
+    <description:StringValue?> <def:TypeSystemDefinition> => (description, def)
+}
+
+pub TypeSystemDefinition: NodeId = {
+    <def:SchemaDefinition> => ast.schema_definition(def),
+    <def:ObjectDefinition> => ast.object_definition(def),
+    <def:InputObjectDefinition> => ast.input_object_definition(def)
+}
+
+pub SchemaDefinition: SchemaDefinition = {
+    schema "{" <roots:RootOperationTypeDefinition*> "}" => SchemaDefinition { <> }
+};
+
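+// First pass: only `query` root operations are supported so far.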
+pub RootOperationTypeDefinition: RootOperationTypeDefinition = {
+    query ":" <name:NamedType> => RootOperationTypeDefinition {
+        operation_type: OperationType::Query,
+        named_type: name
+    }
+}
+
+pub ObjectDefinition: ObjectDefinition = {
+    ty <name:Name> <directives:Directive*> <fields:FieldsDefinition?> => ObjectDefinition {
+        name,
+        directives,
+        fields: fields.unwrap_or_default()
+    }
+};
+
+FieldsDefinition: Vec<NodeId> = {
+    "{" <fields:FieldDefinition+> "}" => fields
+};
+
+FieldDefinition: NodeId = {
+    <description:StringValue?> <name:Name> <arguments:ArgumentsDefinition?> ":" <ty:Type> => ast.field_definition(FieldDefinition {
+        name,
+        ty,
+        arguments: arguments.unwrap_or_default(),
+        description,
+    })
+};
+
+ArgumentsDefinition: Vec<NodeId> = {
+    "(" <arguments:InputValueDefinition+> ")" => arguments,
+};
+
+pub InputObjectDefinition: InputObjectDefinition = {
+    input <name:Name> <directives:Directive*> <fields:InputFieldsDefinition?> => InputObjectDefinition {
+        name,
+        directives,
+        fields: fields.unwrap_or_default()
+    }
+};
+
+InputFieldsDefinition: Vec<NodeId> = {
+    "{" <fields:InputValueDefinition+> "}" => fields
+};
+
+// TODO: Add directives
+InputValueDefinition: NodeId =
+    <description:StringValue?> <name:Name> ":" <ty:Type> <default:DefaultValue?> => ast.input_value_definition(InputValueDefinition { <> });
+
+DefaultValue: ValueId = {
+    "=" <v:Value> => v
+}
+
+Name: StringId = <s:Ident> => ast.ident(s);
+
+NamedType: StringId = <s:Ident> => ast.ident(s);
+
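+// Wrapping is stored flat rather than recursively: a leading "[" just recurses,
+// and the matching "]" (plus any "!") is consumed afterwards as a
+// `WrappingType`. e.g. `[Int!]!` parses as name `Int` with wrappers
+// `[NonNull, List, NonNull]`, innermost first.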
+Type: TypeId = {
+    "[" <ty:Type> => ty,
+    <name:NamedType> <wrappers:WrappingType*> => ast.type_reference(Type { <> })
+}
+
+WrappingType: WrappingType = {
+    "!" => WrappingType::NonNull,
+    "]" => WrappingType::List
+}
+
+Value: ValueId = {
+    "$" <name:Name> => ast.value(Value::Variable(name)),
+    <int:IntegerLiteral> => ast.value(Value::Int(int.parse().unwrap())),
+    <float:FloatLiteral> => ast.value(Value::Float(float.parse().unwrap())),
+    <s:StringLiteral> => {
+        let id = ast.intern_string(s);
+        ast.value(Value::String(id))
+    },
+    <s:BlockStringLiteral> => {
+        let id = ast.intern_string(s);
+        ast.value(Value::String(id))
+    },
+    true => ast.value(Value::Boolean(true)),
+    false => ast.value(Value::Boolean(false)),
+    null => ast.value(Value::Null),
+    "[" <values:Value*> "]" => ast.value(Value::List(values)),
+    "{" <fields:ObjectField*> "}" => ast.value(Value::Object(fields)),
+    <value:EnumValue> => ast.value(Value::Enum(value)),
+}
+
+ObjectField: (StringId, ValueId) = {
+    <name:Name> ":" <value:Value> => (name, value)
+}
+
+StringValue: NodeId = {
+    <s:StringLiteral> => {
+        let id = ast.intern_string(s);
+        ast.string_literal(StringLiteral::Normal(id))
+    },
+    <s:BlockStringLiteral> => {
+        let id = ast.intern_string(s);
+        ast.string_literal(StringLiteral::Block(id))
+    },
+}
+
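+// Keywords double as enum values, so each keyword token has to be accepted
+// here and mapped back to its source text (same trick as `Ident` below).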
+EnumValue: StringId = {
+    <s:RawIdent> => ast.ident(s),
+    schema => ast.ident("schema"),
+    query => ast.ident("query"),
+    ty => ast.ident("type"),
+    input => ast.ident("input"),
+}
+
+Directive: DirectiveId = {
+    "@" <name:Name> <arguments:Arguments?> => ast.directive(Directive {
+        name,
+        arguments: arguments.unwrap_or_default()
+    })
+}
+
+Arguments: Vec<ArgumentId> = {
+    "(" <arguments:Argument*> ")" => arguments
+}
+
+Argument: ArgumentId = {
+    <name:Name> ":" <value:Value> => ast.argument(Argument { <> }),
+}
+
+// TODO: Make this NodeId probably...
+Ident: &'input str = {
+    <s:RawIdent> => s,
+    schema => "schema",
+    query => "query",
+    ty => "type",
+    input => "input",
+    true => "true",
+    false => "false",
+    null => "null",
+}
+
+extern {
+    type Location = usize;
+    type Error = lexer::LexicalError;
+
+    enum lexer::Token<'input> {
+        "$" => lexer::Token::Dollar,
+
+        ":" => lexer::Token::Colon,
+
+        "{" => lexer::Token::OpenBrace,
+        "}" => lexer::Token::CloseBrace,
+
+        "(" => lexer::Token::OpenParen,
+        ")" => lexer::Token::CloseParen,
+
+        "[" => lexer::Token::OpenBracket,
+        "]" => lexer::Token::CloseBracket,
+
+        "!" => lexer::Token::Exclamation,
+        "=" => lexer::Token::Equals,
+        "@" => lexer::Token::At,
+
+        RawIdent => lexer::Token::Identifier(<&'input str>),
+
+        StringLiteral => lexer::Token::StringLiteral(<&'input str>),
+        BlockStringLiteral => lexer::Token::BlockStringLiteral(<&'input str>),
+        FloatLiteral => lexer::Token::FloatLiteral(<&'input str>),
+        IntegerLiteral => lexer::Token::IntegerLiteral(<&'input str>),
+
+        // Would be nice if these could just be aliases of `Identifier` but LALRPOP doesn't
+        // seem to support this well: https://github.com/lalrpop/lalrpop/issues/671
+        schema => lexer::Token::Schema,
+        query => lexer::Token::Query,
+        ty => lexer::Token::Type,
+        input => lexer::Token::Input,
+        true => lexer::Token::True,
+        false => lexer::Token::False,
+        null => lexer::Token::Null,
+    }
+}
\ No newline at end of file