diff --git a/.gitignore b/.gitignore index 96ef6c0..bf197f9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,6 @@ /target Cargo.lock +*.profraw +/*.json +/examples/cache.json +/examples/local_cache.json diff --git a/Cargo.toml b/Cargo.toml index c046c94..2ce2b99 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,12 +3,33 @@ name = "cryptoscript" version = "0.1.0" edition = "2018" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[[bin]] +name = "cryptoscript" +path = "src/main.rs" + +[[bin]] +name = "rest-api" +path = "src/rest_api.rs" [dependencies] -sha2 = "0.9" -sha3 = "0.9" -hex-literal = "0.3" +actix-web = { version = "4.0.1", default-features = false, features = ["macros"] } + +clap = { version = "3.1.6", features = ["derive"] } +enumset = { version = "1.0.8", features = ["serde"] } +futures = { version = "0.3.21", features = ["executor", "thread-pool"] } generic-array = "0.14" hex = "0.4" +hex-literal = "0.3" +indexmap = "1.5" +k256 = { version = "0.10.2", features = ["std", "ecdsa", "serde"] } +quickcheck = "1.0.3" +quickcheck_macros = "1.0.0" +reqwest = { version = "0.11.10", features = ["json"] } +serde = { version = "1.0", features = ["derive"] } +serde_json = { version = "1.0.79", features = ["arbitrary_precision", "preserve_order"] } +sha2 = "0.9" +sha3 = "0.9" +tokio = { version = "1.17.0", features = ["macros", "rt-multi-thread"] } +tokio-stream = "0.1.8" thiserror = "1.0" +typenum = "1.15.0" diff --git a/README.md b/README.md index 69a4c36..94e9f82 100644 --- a/README.md +++ b/README.md @@ -79,3 +79,65 @@ the presenter "clears" a puzzle to get authorized. cryptoscript is extensible with modules to support all blockchain networks and off-chain data. This policy-as-code primitive can create infinite matching representations. 
+## Demo + +There are two demos: +- Local: this demo is self-contained to not require any API keys +- Etherscan: this demo requires a free Etherscan API key, which you can get + [here](https://docs.etherscan.io/getting-started/viewing-api-usage-statistics). + +### Local Demo + +The local demo requires running a tiny test server, which can be started with the following command: + +```bash +cargo run --bin rest-api +``` + +Note: this API accepts PUTs of new GET "APIs" for testing: each requires a +fixed `application/json` request body and returns a fixed `application/json` +response. + +To run the demo itself, run: + +```bash +cargo r --bin cryptoscript -- \ + --code examples/local_demo_code.json \ + --cache-location examples/local_cache.json \ + --input examples/input.json \ + --queries examples/local_query.json \ + --variables '{ + "contractaddress": "0x57d90b64a1a57749b0f932f1a3395792e12e7055", + "address": "0xe04f27eb70e025b78871a2ad7eabe85e61212761", + "apikey": "DUMMY_ETHERSCAN_API_KEY" }' +``` + +You'll see `successful!` if it completes without any errors. + +### Etherscan Demo + +*NOTE: this demo currently ignores any errors from Etherscan.* + +This demo requires a free Etherscan API key, which you can get +[here](https://docs.etherscan.io/getting-started/viewing-api-usage-statistics). + +Once you have an API key, replace `YOUR_ETHERSCAN_API_KEY` below with your API +key from Etherscan to run the demo: + +```bash +cargo r --bin cryptoscript -- \ + --code examples/demo_code.json \ + --cache-location examples/cache.json \ + --input examples/input.json \ + --queries examples/query.json \ + --variables '{ + "contractaddress": "0x57d90b64a1a57749b0f932f1a3395792e12e7055", + "address": "0xe04f27eb70e025b78871a2ad7eabe85e61212761", + "apikey": "YOUR_ETHERSCAN_API_KEY" }' +``` + +### Troubleshooting Demos + +If you have any issues, make sure to clear any `cache.json` files to ensure +you're receiving fresh query responses. 
+ diff --git a/examples/demo_code.json b/examples/demo_code.json new file mode 100644 index 0000000..00f3bf3 --- /dev/null +++ b/examples/demo_code.json @@ -0,0 +1,93 @@ +{ + "instructions": [ + { + "Restack": { + "restack_depth": 1, + "restack_vec": [] + } + }, + { + "UnpackJson": "Object" + }, + { + "Push": { + "String": "queries" + } + }, + "Lookup", + { + "UnpackJson": "Array" + }, + { + "Push": { + "Number": 0 + } + }, + "Index", + { + "UnpackJson": "Object" + }, + { + "Restack": { + "restack_depth": 1, + "restack_vec": [ + 0, + 0 + ] + } + }, + { + "Push": { + "String": "action" + } + }, + "Lookup", + { + "UnpackJson": "String" + }, + { + "Push": { + "String": "tokenbalance" + } + }, + "StringEq", + "AssertTrue", + { + "Restack": { + "restack_depth": 1, + "restack_vec": [] + } + }, + { + "Restack": { + "restack_depth": 1, + "restack_vec": [ + 0, + 0 + ] + } + }, + { + "Push": { + "String": "contractaddress" + } + }, + "Lookup", + { + "UnpackJson": "String" + }, + { + "Push": { + "String": "0x57d90b64a1a57749b0f932f1a3395792e12e7055" + } + }, + "StringEq", + "AssertTrue", + { + "Restack": { + "restack_depth": 1, + "restack_vec": [] + } + } + ] +} diff --git a/examples/input.json b/examples/input.json new file mode 100644 index 0000000..2406786 --- /dev/null +++ b/examples/input.json @@ -0,0 +1,40 @@ +{ + "queries": [ + { + "uri": "https://api.etherscan.io/api", + "module": "account", + "action": "tokenbalance", + "contractaddress": "0x57d90b64a1a57749b0f932f1a3395792e12e7055", + "address": "0xe04f27eb70e025b78871a2ad7eabe85e61212761", + "tag": "latest", + "blockno": "8000000", + "apikey": "YourApiKeyToken", + "response": + { + "status": "1", + "message": "OK", + "result": "135499" + } + } + ], + "prompts": [ + { + "action": "siwe", + "version": "1.1.0", + "data": { + "message": "service.org wants you to sign in with your Ethereum account:\n0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2\n\nI accept the ServiceOrg Terms of Service: https://service.org/tos\n\nURI: 
https://service.org/login\nVersion: 1\nChain ID: 1\nNonce: 32891757\nIssued At: 2021-09-30T16:25:24.000Z\nResources:\n- ipfs://Qme7ss3ARVgxv6rXqVPiikMJ8u2NLgmgszg13pYrDKEoiu\n- https://example.com/my-web2-claim.json", + "fields": { + "domain": "service.org", + "address": "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", + "statement": "I accept the ServiceOrg Terms of Service: https://service.org/tos", + "uri": "https://service.org/login", + "version": "1", + "chainId": 1, + "nonce": "32891757", + "issuedAt": "2021-09-30T16:25:24.000Z", + "resources": ["ipfs://Qme7ss3ARVgxv6rXqVPiikMJ8u2NLgmgszg13pYrDKEoiu", "https://example.com/my-web2-claim.json"] + } + } + } + ] +} diff --git a/examples/local_demo_code.json b/examples/local_demo_code.json new file mode 100644 index 0000000..af18c86 --- /dev/null +++ b/examples/local_demo_code.json @@ -0,0 +1,99 @@ +{ + "instructions": [ + { + "Restack": { + "restack_depth": 1, + "restack_vec": [] + } + }, + { + "Restack": { + "restack_depth": 1, + "restack_vec": [] + } + }, + { + "UnpackJson": "Object" + }, + { + "Push": { + "String": "queries" + } + }, + "Lookup", + { + "UnpackJson": "Array" + }, + { + "Push": { + "Number": 0 + } + }, + "Index", + { + "UnpackJson": "Object" + }, + { + "Restack": { + "restack_depth": 1, + "restack_vec": [ + 0, + 0 + ] + } + }, + { + "Push": { + "String": "action" + } + }, + "Lookup", + { + "UnpackJson": "String" + }, + { + "Push": { + "String": "tokenbalance" + } + }, + "StringEq", + "AssertTrue", + { + "Restack": { + "restack_depth": 1, + "restack_vec": [] + } + }, + { + "Restack": { + "restack_depth": 1, + "restack_vec": [ + 0, + 0 + ] + } + }, + { + "Push": { + "String": "contractaddress" + } + }, + "Lookup", + { + "UnpackJson": "String" + }, + { + "Push": { + "String": "0x57d90b64a1a57749b0f932f1a3395792e12e7055" + } + }, + "StringEq", + "AssertTrue", + { + "Restack": { + "restack_depth": 1, + "restack_vec": [] + } + } + ] +} diff --git a/examples/local_query.json b/examples/local_query.json new 
file mode 100644 index 0000000..d6b1416 --- /dev/null +++ b/examples/local_query.json @@ -0,0 +1,82 @@ +{ + "queries": [ + { + "name": "setup_erc20", + "url": "http://127.0.0.1:8080/apis/erc20", + "template": { + "Object": { + "request": { + "Object": { + "module": { + "String": "account" + }, + "action": { + "String": "tokenbalance" + }, + "contractaddress": { + "String": "0x57d90b64a1a57749b0f932f1a3395792e12e7055" + }, + "address": { + "String": "0xe04f27eb70e025b78871a2ad7eabe85e61212761" + }, + "tag": { + "String": "latest" + }, + "apikey": { + "String": "DUMMY_ETHERSCAN_API_KEY" + } + } + }, + "response": { + "Object": { + "status": { + "String": "1" + }, + "message": { + "String": "OK" + }, + "result": { + "String": "135499" + } + } + }, + "rate_limit_seconds": { + "Number": 1 + }, + "last_api_call": "Null" + } + }, + "cached": true, + "query_type": "Put" + }, + + { + "name": "erc20", + "url": "http://127.0.0.1:8080/apis/erc20", + "template": { + "Object": { + "module": { + "String": "account" + }, + "action": { + "String": "tokenbalance" + }, + "contractaddress": { + "Var": "contractaddress" + }, + "address": { + "Var": "address" + }, + "tag": { + "String": "latest" + }, + "apikey": { + "Var": "apikey" + } + } + }, + "cached": true, + "query_type": "Get" + } + ] +} diff --git a/examples/query.json b/examples/query.json new file mode 100644 index 0000000..f2ce857 --- /dev/null +++ b/examples/query.json @@ -0,0 +1,32 @@ +{ + "queries": [ + { + "name": "erc20", + "url": "https://api.etherscan.io/api", + "template": { + "Object": { + "module": { + "String": "account" + }, + "action": { + "String": "tokenbalance" + }, + "contractaddress": { + "Var": "contractaddress" + }, + "address": { + "Var": "address" + }, + "tag": { + "String": "latest" + }, + "apikey": { + "Var": "apikey" + } + } + }, + "cached": true, + "query_type": "Get" + } + ] +} diff --git a/src/an_elem.rs b/src/an_elem.rs new file mode 100644 index 0000000..6e568bf --- /dev/null +++ b/src/an_elem.rs 
@@ -0,0 +1,234 @@ +use crate::elem::{Elem, ElemSymbol}; + +use thiserror::Error; + +use std::fmt::Debug; +use std::marker::PhantomData; + +use enumset::EnumSet; +use serde_json::{Map, Number, Value}; + +/// Valid Elem(ent) types +/// +/// TODO: make closed +pub trait AnElem: Clone + Debug + PartialEq { + // TODO: rename? + + // fn elem_symbol(t: PhantomData) -> ElemType; + /// The ElemSymbol's associated with the Elem's that can form this type + fn elem_symbol(t: PhantomData) -> EnumSet; + + /// Convert the Self to Elem by using one of Elem's constructors + fn to_elem(self) -> Elem; + + /// Convert the given Elem to Self through pattern-matching + fn from_elem(t: PhantomData, x: Elem) -> Result; +} + +impl AnElem for Elem { + fn elem_symbol(_t: PhantomData) -> EnumSet { + EnumSet::all() + } + + fn to_elem(self) -> Elem { + self + } + + fn from_elem(_t: PhantomData, x: Elem) -> Result { + Ok(x) + } +} + + +impl AnElem for () { + fn elem_symbol(_t: PhantomData) -> EnumSet { + EnumSet::only(ElemSymbol::Unit) + } + + fn to_elem(self) -> Elem { + Elem::Unit + } + + fn from_elem(_t: PhantomData, x: Elem) -> Result { + let elem_symbol = ::elem_symbol(PhantomData); + match x { + Elem::Unit => Ok(()), + _ => Err(AnElemError::UnexpectedElemType { + expected: elem_symbol, + found: x, + }), + } + } +} + +impl AnElem for bool { + fn elem_symbol(_t: PhantomData) -> EnumSet { + EnumSet::only(ElemSymbol::Bool) + } + + fn to_elem(self) -> Elem { + Elem::Bool(self) + } + + fn from_elem(_t: PhantomData, x: Elem) -> Result { + let elem_symbol = ::elem_symbol(PhantomData); + match x { + Elem::Bool(y) => Ok(y), + _ => Err(AnElemError::UnexpectedElemType { + expected: elem_symbol, + found: x, + }), + } + } +} + +impl AnElem for Number { + fn elem_symbol(_t: PhantomData) -> EnumSet { + EnumSet::only(ElemSymbol::Number) + } + + fn to_elem(self) -> Elem { + Elem::Number(self) + } + + fn from_elem(_t: PhantomData, x: Elem) -> Result { + let elem_symbol = ::elem_symbol(PhantomData); + match x 
{ + Elem::Number(y) => Ok(y), + _ => Err(AnElemError::UnexpectedElemType { + expected: elem_symbol, + found: x, + }), + } + } +} + +impl AnElem for Vec { + fn elem_symbol(_t: PhantomData) -> EnumSet { + EnumSet::only(ElemSymbol::Bytes) + } + + fn to_elem(self) -> Elem { + Elem::Bytes(self) + } + + fn from_elem(_t: PhantomData, x: Elem) -> Result { + let elem_symbol = ::elem_symbol(PhantomData); + match x { + Elem::Bytes(y) => Ok(y), + _ => Err(AnElemError::UnexpectedElemType { + expected: elem_symbol, + found: x, + }), + } + } +} + +impl AnElem for String { + fn elem_symbol(_t: PhantomData) -> EnumSet { + EnumSet::only(ElemSymbol::String) + } + + fn to_elem(self) -> Elem { + Elem::String(self) + } + + fn from_elem(_t: PhantomData, x: Elem) -> Result { + let elem_symbol = ::elem_symbol(PhantomData); + match x { + Elem::String(y) => Ok(y), + _ => Err(AnElemError::UnexpectedElemType { + expected: elem_symbol, + found: x, + }), + } + } +} + +impl AnElem for Vec { + fn elem_symbol(_t: PhantomData) -> EnumSet { + EnumSet::only(ElemSymbol::Array) + } + + fn to_elem(self) -> Elem { + Elem::Array(self) + } + + fn from_elem(_t: PhantomData, x: Elem) -> Result { + let elem_symbol = ::elem_symbol(PhantomData); + match x { + Elem::Array(y) => Ok(y), + _ => Err(AnElemError::UnexpectedElemType { + expected: elem_symbol, + found: x, + }), + } + } +} + +impl AnElem for Map { + fn elem_symbol(_t: PhantomData) -> EnumSet { + EnumSet::only(ElemSymbol::Object) + } + + fn to_elem(self) -> Elem { + Elem::Object(self) + } + + fn from_elem(_t: PhantomData, x: Elem) -> Result { + let elem_symbol = ::elem_symbol(PhantomData); + match x { + Elem::Object(y) => Ok(y), + _ => Err(AnElemError::UnexpectedElemType { + expected: elem_symbol, + found: x, + }), + } + } +} + +impl AnElem for Value { + fn elem_symbol(_t: PhantomData) -> EnumSet { + EnumSet::only(ElemSymbol::Json) + } + + fn to_elem(self) -> Elem { + Elem::Json(self) + } + + fn from_elem(_t: PhantomData, x: Elem) -> Result { + let 
elem_symbol = ::elem_symbol(PhantomData); + match x { + Elem::Json(y) => Ok(y), + _ => Err(AnElemError::UnexpectedElemType { + expected: elem_symbol, + found: x, + }), + } + } +} + + +/// AnElem::from_elem errors +#[derive(Clone, Debug, Error)] +pub enum AnElemError { + /// AnElem::from_elem: element popped from the Stack wasn't the expected type + #[error("AnElem::from_elem: element popped from the stack\n\n{found}\n\nwasn't the expected type:\n{expected:?}")] + UnexpectedElemType { + /// ElemSymbol's expected to be popped from the Stack + expected: EnumSet, + + /// Elem popped from the Stack + found: Elem, + }, + + /// Converting Elem to Or failed + #[error(" as AnElem>::from_elem: {e_hd:?}\n{e_tl:?}")] + PopOr { + /// x in Or + e_hd: Box, + + /// y in Or + e_tl: Box, + }, +} diff --git a/src/an_elem_return.rs b/src/an_elem_return.rs new file mode 100644 index 0000000..c1e7e74 --- /dev/null +++ b/src/an_elem_return.rs @@ -0,0 +1,47 @@ +use crate::an_elem::AnElem; + +use std::sync::{Arc, Mutex}; + +/// AnElem that can be returned, e.g. using IOElems or IOList. +/// +/// In other words, a "typed return slot": use Return::new() to initialize an empty slot +#[derive(Clone, Debug)] +pub struct Return { + return_value: Arc>>, +} + +impl Return { + /// New Return slot with nothing in it + pub fn new() -> Self { + Return { + return_value: Arc::new(Mutex::new(None)), + } + } + + // TODO: throw error if try_lock fails + /// Return the given return_value, overwriting any existing value. + /// + /// Panics if Mutex::try_lock fails + pub fn returning(&self, return_value: T) { + let mut lock = (*self.return_value).try_lock(); + if let Ok(ref mut mutex) = lock { + **mutex = Some(return_value) + } else { + panic!("returning: try_lock failed") + } + } + + // TODO: throw error if try_lock fails + /// The stored return_value, or None if nothing has been returned yet. 
+ /// + /// Panics if Mutex::try_lock fails + pub fn returned(&self) -> Option { + let mut lock = (*self.return_value).try_lock(); + if let Ok(ref mut mutex) = lock { + (**mutex).clone() + } else { + panic!("returning: try_lock failed") + } + } +} + diff --git a/src/arbitrary.rs b/src/arbitrary.rs new file mode 100644 index 0000000..1d8fb01 --- /dev/null +++ b/src/arbitrary.rs @@ -0,0 +1,105 @@ +use serde::{Deserialize, Serialize}; +use serde_json::{Map, Number, Value}; +use quickcheck::{empty_shrinker, Arbitrary, Gen}; + +/// Wrapped Number for Arbitrary generation +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct ArbitraryNumber { + /// Wrapped Number + pub number: Number, +} + +impl Arbitrary for ArbitraryNumber { + fn arbitrary(g: &mut Gen) -> Self { + if Arbitrary::arbitrary(g) { + if Arbitrary::arbitrary(g) { + let x: u64 = Arbitrary::arbitrary(g); + ArbitraryNumber { number: + From::from(x) + } + } else { + let x: i64 = Arbitrary::arbitrary(g); + ArbitraryNumber { number: + From::from(x) + } + } + } else { + let x: f64 = Arbitrary::arbitrary(g); + ArbitraryNumber { number: + Number::from_f64(x).unwrap_or(From::from(0u8)) + } + } + } + + fn shrink(&self) -> Box> { + match self.number.as_f64() { + None => match self.number.as_u64() { + None => match self.number.as_i64() { + None => empty_shrinker(), + Some(self_i64) => Box::new( + self_i64.shrink() + .map(|x| ArbitraryNumber { + number: From::from(x), + })), + }, + Some(self_u64) => Box::new( + self_u64.shrink() + .map(|x| ArbitraryNumber { + number: From::from(x), + })), + }, + Some(self_f64) => Box::new( + self_f64.shrink() + .map(|x| ArbitraryNumber { + number: Number::from_f64(x).unwrap_or(From::from(0u8)), + })), + } + } +} + + +/// Wrapped Map, encoded as a Vec of (key, value) pairs, for Arbitrary generation +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ArbitraryMap { + /// Map encoded as a Vec of (key, value) pairs + pub map: Vec<(String, Value)>, +} + +impl From for Map 
{ + fn from(x: ArbitraryMap) -> Self { + x.map.into_iter().collect() + } +} + +impl Arbitrary for ArbitraryMap { + fn arbitrary(g: &mut Gen) -> Self { + let map_vec: Vec<(String, ArbitraryValue)> = Arbitrary::arbitrary(g); + ArbitraryMap { + map: map_vec.into_iter().map(|x| (x.0, x.1.value)).collect(), + } + } + + fn shrink(&self) -> Box> { + empty_shrinker() + } +} + + +/// Wrapped Value for Arbitrary generation +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ArbitraryValue { + /// Wrapped Value + pub value: Value, +} + +impl Arbitrary for ArbitraryValue { + fn arbitrary(_g: &mut Gen) -> Self { + ArbitraryValue { + value: Value::Null, + } + } + + fn shrink(&self) -> Box> { + empty_shrinker() + } +} diff --git a/src/cli.rs b/src/cli.rs new file mode 100644 index 0000000..ddc4886 --- /dev/null +++ b/src/cli.rs @@ -0,0 +1,219 @@ +use crate::elem_type::StackType; +use crate::stack::Stack; +use crate::elems::ElemsPopError; +use crate::untyped_instruction::InstructionError; +use crate::typed_instruction::StackInstructionError; +use crate::typed_instrs::Instrs; +use crate::parse::{parse_json, ParseError}; +use crate::query::{QueryError, QueryTemplates}; + +use std::fs; +use std::io; +use std::path::PathBuf; +use std::sync::Arc; + +use clap::{Parser, Subcommand}; +use serde_json::Value; +use thiserror::Error; + +/// Command line interface arguments +/// +/// Runs the given code (parsed as JSON) on the given input (parsed as JSON) +/// and the queries (parsed as JSON into a template +#[derive(Debug, Parser)] +#[clap(author, version, about, long_about = None)] +pub struct Cli { + /// QueryTemplates to run + #[clap(short, long, parse(from_os_str), value_name = "FILE")] + queries: PathBuf, + + /// Query cache json file + #[clap(long, parse(from_os_str), value_name = "FILE")] + cache_location: PathBuf, + + /// Query variables (in JSON) + #[clap(short, long)] + variables: String, + + /// Cryptoscript program to run + #[clap(short, long, parse(from_os_str), value_name = 
"FILE")] + code: PathBuf, + + /// JSON input + #[clap(short, long, parse(from_os_str), value_name = "FILE")] + input: Option, + + /// Subcommand + #[clap(subcommand)] + command: Option, +} + +/// Command line interface subcommands (optional) +#[derive(Debug, Subcommand)] +enum Commands { + /// Parse only + Parse, + + /// Type check only (monomorphic) + TypeMono, + + // // TODO: implement /// Type check only (polymorphic) + // Type, +} + +#[derive(Clone, Debug, Error)] +pub enum CliError { + #[error("Cli::get_input: invalid input path:\n{input_path:?}")] + InvalidInputPath { + input_path: Option, + }, + + #[error("ElemsPopError:\n{0}")] + ElemsPopError(ElemsPopError), + + #[error("QueryError:\n{0}")] + QueryError(QueryError), + + #[error("StackInstructionError:\n{0}")] + StackInstructionError(StackInstructionError), + + #[error("InstructionError:\n{0}")] + InstructionError(InstructionError), + + #[error("ParseError:\n{0}")] + ParseError(Arc), + + #[error("std::io::Error:\n{0}")] + IOError(Arc), + + #[error("Cli::get_input: serde_json::from_str threw error:\n{0}")] + SerdeJsonError(Arc), +} + +impl From for CliError { + fn from(error: ElemsPopError) -> Self { + Self::ElemsPopError(error) + } +} + +impl From for CliError { + fn from(error: QueryError) -> Self { + Self::QueryError(error) + } +} + +impl From for CliError { + fn from(error: StackInstructionError) -> Self { + Self::StackInstructionError(error) + } +} + +impl From for CliError { + fn from(error: InstructionError) -> Self { + Self::InstructionError(error) + } +} + +impl From for CliError { + fn from(error: ParseError) -> Self { + Self::ParseError(Arc::new(error)) + } +} +impl From for CliError { + fn from(error: io::Error) -> Self { + Self::IOError(Arc::new(error)) + } +} + +impl From for CliError { + fn from(error: serde_json::Error) -> Self { + Self::SerdeJsonError(Arc::new(error)) + } +} + +impl Cli { + /// Get queries from self.queries PathBuf and parse JSON + pub fn parse_queries(&self) -> Result { + 
let queries_str = fs::read_to_string(self.queries.clone())?; + let queries: QueryTemplates = serde_json::from_str(&queries_str)?; + Ok(queries) + } + + /// Get code from self.code PathBuf and parse JSON + pub fn parse_code(&self) -> Result { + let instructions_str = fs::read_to_string(self.code.clone())?; + Ok(parse_json(&instructions_str)?.to_instrs()?) + } + + /// Get input from self.input PathBuf and parse JSON + pub fn get_input(&self) -> Result { + if let Some(input_path) = self.input.as_deref() { + let input_str = fs::read_to_string(input_path)?; + Ok(serde_json::from_str(&input_str)?) + } else { + Err(CliError::InvalidInputPath { + input_path: self.input.clone(), + }) + } + } + + /// Monomorphic type of input instructions + pub fn type_of_mono(&self) -> Result { + let instructions = self.parse_code()?; + let num_queries = self.parse_queries()?.len(); + instructions.debug()?; + Ok(instructions.type_of_mono(num_queries)?) + } + + /// Run Cli::parse_code, get input and queries, run queries, and run the + /// code on the resulting queries + pub async fn parse_and_run_result(&self) -> Result<(), CliError> { + let instructions = self.parse_code()?; + let mut stack = Stack::new(); + + let input_json_value = self.get_input()?; + stack.push_elem(input_json_value); + + let variables = serde_json::from_str(&self.variables)?; + let mut queries_result = self.parse_queries()? + .run(Arc::new(variables), + Arc::new(self.cache_location.clone())) + .await?; + queries_result.reverse(); + for query_result in queries_result { + stack.push_elem(query_result) + } + Ok(instructions.run(&mut stack)?) 
+ } + + /// Run Cli::parse_and_run_result and print its result + pub async fn parse_and_run(&self) -> () { + match self.parse_and_run_result().await { + Ok(()) => println!("successful!"), + Err(e) => println!("failed:\n{}\n", e), + } + } + + /// Run a set of Cli arguments + pub async fn run(&self) -> () { + match self.command { + None => self.parse_and_run().await, + Some(Commands::Parse) => { + match self.parse_code() { + Ok(parsed) => { + parsed.debug() + .unwrap_or_else(|e| println!("Instrs::debug() failed:\n{}", e)) + }, + Err(e) => println!("parsing failed:\n{}", e), + } + }, + Some(Commands::TypeMono) => { + match self.type_of_mono() { + Ok(type_of) => println!("type:\n{}", type_of), + Err(e) => println!("type-mono failed:\n{}", e), + } + }, + } + } +} + diff --git a/src/elem.rs b/src/elem.rs new file mode 100644 index 0000000..497d835 --- /dev/null +++ b/src/elem.rs @@ -0,0 +1,256 @@ +use crate::arbitrary::{ArbitraryNumber, ArbitraryMap, ArbitraryValue}; + +use std::cmp; +use std::fmt; +use std::fmt::{Debug, Display, Formatter}; +use std::iter::IntoIterator; + +use enumset::{EnumSet, EnumSetType}; +use quickcheck::{empty_shrinker, Arbitrary, Gen}; +use serde::{Deserialize, Serialize}; +use serde_json::{Map, Number, Value}; + +/// An untyped Stack element +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum Elem { + /// Unit value (i.e. 
the empty struct) + Unit, + + /// A boolean + Bool(bool), + + /// A JSON Number + Number(Number), + + /// An array of bytes + Bytes(Vec), + + /// A JSON String + String(String), + + /// A JSON Array + Array(Vec), + + /// A JSON Object + Object(Map), + + /// A JSON Value + Json(Value), +} + +impl PartialOrd for Elem { + fn partial_cmp(&self, other: &Self) -> Option { + match (self, other) { + (Self::Unit, Self::Unit) => Some(cmp::Ordering::Equal), + (Self::Bool(x), Self::Bool(y)) => x.partial_cmp(y), + (Self::Bytes(x), Self::Bytes(y)) => x.partial_cmp(y), + (Self::Number(x), Self::Number(y)) => x.to_string().partial_cmp(&y.to_string()), + (Self::String(x), Self::String(y)) => x.partial_cmp(y), + (Self::Array(x), Self::Array(y)) => if x == y { Some(cmp::Ordering::Equal) } else { None }, + (Self::Object(x), Self::Object(y)) => if x == y { Some(cmp::Ordering::Equal) } else { None } + (_, _) => None, + } + } +} + +impl Display for Elem { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { + match self { + Self::Unit => write!(f, "()"), + Self::Bool(x) => write!(f, "{}", x), + Self::Number(x) => write!(f, "{}", x), + Self::Bytes(x) => write!(f, "{}", hex::encode(x.as_slice())), + Self::String(x) => write!(f, "{}", x), + Self::Array(x) => { + f.debug_list() + .entries(x.iter() + .map(|x| format!("{}", x))) + .finish() + }, + Self::Object(x) => { + f.debug_list() + .entries(x.iter() + .map(|(x, y)| format!("({}, {})", x.clone(), y.clone()))) + .finish() + }, + Self::Json(x) => write!(f, "{}", x), + } + } +} + + + +// EnumSetType implies: Copy, PartialEq, Eq +/// Elem as an enum of unit (i.e. 
empty struct) variants +#[derive(EnumSetType, Debug, PartialOrd, Ord, Serialize, Deserialize)] +pub enum ElemSymbol { + /// Elem::Unit + Unit, + + /// Elem::Bool + Bool, + + /// Elem::Number + Number, + + /// Elem::Bytes + Bytes, + + /// Elem::String + String, + + /// Elem::Array + Array, + + /// Elem::Object + Object, + + /// Elem::Json + Json, +} + +impl Arbitrary for ElemSymbol { + fn arbitrary(g: &mut Gen) -> Self { + let choices: Vec = EnumSet::all().iter().collect(); + *g.choose(&choices).unwrap_or_else(|| &Self::Unit) + } + + fn shrink(&self) -> Box> { + let self_copy = self.clone(); + Box::new(EnumSet::all().iter().filter(move |&x| x < self_copy)) + } +} + +impl ElemSymbol { + /// Given a Gen, use this ElemSymbol as a template of an Elem, and fill it + /// with Arbitrary contents. + /// + /// x.arbitrary_contents(g1).symbol() == x.arbitrary_contents(g2).symbol() + pub fn arbitrary_contents(&self, g: &mut Gen) -> Elem { + match self { + Self::Unit => Elem::Unit, + Self::Bool => Elem::Bool(Arbitrary::arbitrary(g)), + Self::Number => { + let x: ArbitraryNumber = Arbitrary::arbitrary(g); + Elem::Number(x.number) + }, + Self::Bytes => Elem::Bytes(Arbitrary::arbitrary(g)), + Self::String => Elem::String(Arbitrary::arbitrary(g)), + Self::Array => { + let xs: Vec = Arbitrary::arbitrary(g); + Elem::Array(xs.into_iter().map(|x| x.value).collect()) + }, + Self::Object => { + let xs: ArbitraryMap = Arbitrary::arbitrary(g); + Elem::Object(From::from(xs)) + }, + Self::Json => { + let xs: ArbitraryValue = Arbitrary::arbitrary(g); + Elem::Json(xs.value) + }, + } + } +} + +impl Arbitrary for Elem { + fn arbitrary(g: &mut Gen) -> Self { + let symbol: ElemSymbol = Arbitrary::arbitrary(g); + symbol.arbitrary_contents(g) + } + + // TODO: shrink + fn shrink(&self) -> Box> { + empty_shrinker() + // let self_copy = self.clone(); + // Box::new(EnumSet::all().iter().filter(move |&x| x < self_copy)) + } +} + + + +impl From for &'static str { + fn from(x: ElemSymbol) -> Self { + match 
x { + ElemSymbol::Unit => "Unit", + ElemSymbol::Bool => "Bool", + ElemSymbol::Bytes => "Bytes", + ElemSymbol::Number => "Number", + ElemSymbol::String => "String", + ElemSymbol::Array => "Array", + ElemSymbol::Object => "Object", + ElemSymbol::Json => "JSON", + } + } +} + +impl From<&Elem> for ElemSymbol { + fn from(x: &Elem) -> Self { + match x { + Elem::Unit => Self::Unit, + Elem::Bool(_) => Self::Bool, + Elem::Number(_) => Self::Number, + Elem::Bytes(_) => Self::Bytes, + Elem::String(_) => Self::String, + Elem::Array(_) => Self::Array, + Elem::Object(_) => Self::Object, + Elem::Json(_) => Self::Json, + } + } +} + +impl ElemSymbol { + #[cfg(test)] + pub fn default_elem(&self) -> Elem { + match self { + Self::Unit => Elem::Unit, + Self::Bool => Elem::Bool(Default::default()), + Self::Number => Elem::Number(From::::from(Default::default())), + Self::Bytes => Elem::Bytes(Default::default()), + Self::String => Elem::String(Default::default()), + Self::Array => Elem::Array(Default::default()), + Self::Object => Elem::Object(Default::default()), + Self::Json => Elem::Json(Default::default()), + } + } +} + +#[cfg(test)] +mod elem_symbol_tests { + use super::*; + + #[test] + fn test_from_default_elem() { + for symbol in EnumSet::::all().iter() { + assert_eq!(symbol, symbol.default_elem().symbol()) + } + } + + #[test] + fn test_to_default_elem() { + for default_elem in [ + Elem::Unit, + Elem::Bool(Default::default()), + Elem::Number(From::::from(Default::default())), + Elem::Bytes(Default::default()), + Elem::String(Default::default()), + Elem::Array(Default::default()), + Elem::Object(Default::default()), + Elem::Json(Default::default()), + ] { + assert_eq!(default_elem, default_elem.symbol().default_elem()) + } + } +} + +impl Elem { + /// ElemSymbol of this Elem + pub fn symbol(&self) -> ElemSymbol { + From::from(self) + } + + /// ElemSymbol String + pub fn symbol_str(&self) -> &'static str { + From::from(self.symbol()) + } +} + diff --git a/src/elem_type.rs 
b/src/elem_type.rs new file mode 100644 index 0000000..d068f9e --- /dev/null +++ b/src/elem_type.rs @@ -0,0 +1,267 @@ +use crate::location::Location; +use crate::elem::{Elem, ElemSymbol}; + +use thiserror::Error; + +use std::fmt; +use std::fmt::{Debug, Display, Formatter}; +use std::iter::{FromIterator, IntoIterator}; + +use enumset::EnumSet; +use serde::{Deserialize, Serialize}; + + +/// ElemType metadata +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +pub struct ElemTypeInfo { + /// Location of a variable associated with an ElemType + location: Location, +} + +// TODO: make fields private? +/// A set of ElemSymbol's representing a type, with included metadata +/// +/// E.g. {String, bool} represents the type that can be a String or a bool. +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +pub struct ElemType { + /// The set of ElemSymbol's making up this type + pub type_set: EnumSet, + + /// Type metadata, for debugging, analysis, pretty printing + pub info: Vec, +} + +// Formatting: +// ``` +// ElemType { +// type_set: {A, B, C}, +// info: _, +// } +// ``` +// +// Results in: +// ``` +// {A, B, C} +// ``` +impl Display for ElemType { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { + write!(f, + "{{{}}}", + self.type_set.iter() + .fold(String::new(), + |memo, x| { + let x_str: &'static str = From::from(x); + if memo == "" { + x_str.to_string() + } else { + memo + ", " + &x_str.to_string() + } + } + )) + } +} + +#[cfg(test)] +mod elem_type_display_tests { + use super::*; + + #[test] + fn test_empty() { + let elem_type = ElemType { + type_set: EnumSet::empty(), + info: vec![], + }; + assert_eq!("{}", format!("{}", elem_type)); + } + + #[test] + fn test_singleton() { + for elem_symbol in EnumSet::all().iter() { + let elem_type = ElemType { + type_set: EnumSet::only(elem_symbol), + info: vec![], + }; + assert_eq!(format!("{{{}}}", Into::<&'static str>::into(elem_symbol)), + 
format!("{}", elem_type)); + } + } + + #[test] + fn test_all() { + assert_eq!("{Unit, Bool, Number, Bytes, String, Array, Object, JSON}", + format!("{}", ElemType::any(vec![]))); + } +} + +impl ElemSymbol { + /// ElemType of a particular ElemSymbol + pub fn elem_type(&self, locations: Vec) -> ElemType { + ElemType { + type_set: EnumSet::only(*self), + info: locations.iter() + .map(|&location| + ElemTypeInfo { + location: location, + }).collect(), + } + } +} + +impl Elem { + /// ElemType of a particular Elem. See ElemSymbol::elem_type + pub fn elem_type(&self, locations: Vec) -> ElemType { + self.symbol().elem_type(locations) + } +} + +impl ElemType { + /// Construct from a type_set and Vec of Location's + pub fn from_locations(type_set: EnumSet, + locations: Vec) -> Self { + ElemType { + type_set: type_set, + info: locations.iter() + .map(|&location| + ElemTypeInfo { + location: location, + }).collect(), + } + } + + /// The type of any Elem + pub fn any(locations: Vec) -> Self { + Self::from_locations( + EnumSet::all(), + locations) + } + + /// Calculate the union of two ElemType's and append their metadata + pub fn union(&self, other: Self) -> Self { + let both = self.type_set.union(other.type_set); + let mut both_info = self.info.clone(); + both_info.append(&mut other.info.clone()); + ElemType { + type_set: both, + info: both_info, + } + } + + /// Unify two ElemType's by returning their intersection and combining their metadata + /// + /// Fails if their intersection is empty (i.e. 
if it results in an empty type) + pub fn unify(&self, other: Self) -> Result { + let both = self.type_set.intersection(other.type_set); + if both.is_empty() { + Err(ElemTypeError::UnifyEmpty { + lhs: self.clone(), + rhs: other.clone(), + }) + } else { + let mut both_info = self.info.clone(); + both_info.append(&mut other.info.clone()); + Ok(ElemType { + type_set: both, + info: both_info, + }) + } + } +} + +#[derive(Clone, Debug, PartialEq, Error)] +pub enum ElemTypeError { + #[error("ElemType::unify applied to non-intersecting types:\nlhs:\n{lhs}\nrhs:\n{rhs}")] + UnifyEmpty { + lhs: ElemType, + rhs: ElemType, + }, +} + + +// TODO: relocate +// BEGIN DebugAsDisplay +#[derive(Clone, PartialEq, Eq)] +struct DebugAsDisplay +where + T: Display, +{ + t: T, +} + +impl Display for DebugAsDisplay +where + T: Display, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { + write!(f, "{}", self.t) + } +} + +impl Debug for DebugAsDisplay +where + T: Display, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { + write!(f, "{}", self.t) + } +} +// END DebugAsDisplay + +/// The type of a Stack +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct StackType { + /// List of types of the Stack, in the same order as any Stack of this type + pub types: Vec, +} + +impl IntoIterator for StackType { + type Item = ElemType; + type IntoIter = as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.types.into_iter() + } +} + +impl FromIterator for StackType { + fn from_iter(iter: T) -> Self + where + T: IntoIterator, + { + StackType { + types: FromIterator::from_iter(iter), + } + } +} + +impl StackType { + /// Length of the StackType, equal to the length of any Stack of this type + pub fn len(&self) -> usize { + self.types.len() + } + + /// Push the given ElemType to the StackType + pub fn push(&mut self, elem_type: ElemType) -> () { + self.types.insert(0, elem_type) + } + + /// Push (count) copies of the given ElemType to the StackType 
+ pub fn push_n(&mut self, elem_type: ElemType, count: usize) -> () { + for _index in 0..count { + self.push(elem_type.clone()) + } + } +} + +// Uses DebugAsDisplay to eliminate '"' around strings: +// ["{Number}", "{Array, Object}"] -> [{Number}, {Array, Object}] +impl Display for StackType { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { + f.debug_list() + .entries(self.types + .iter() + .map(|x| DebugAsDisplay { t: format!("{}", x) })) + .finish()?; + Ok(()) + } +} + diff --git a/src/elems.rs b/src/elems.rs new file mode 100644 index 0000000..b603fe6 --- /dev/null +++ b/src/elems.rs @@ -0,0 +1,119 @@ +use crate::restack::RestackError; +use crate::elem::{Elem, ElemSymbol}; +use crate::elem_type::{ElemType, ElemTypeError, StackType}; +use crate::an_elem::AnElem; +use crate::stack::{Stack, StackError}; +use crate::types::context::ContextError; + +use std::fmt::Debug; +use std::marker::PhantomData; +use std::sync::Arc; + +use enumset::EnumSet; +use generic_array::{GenericArray, ArrayLength}; +use thiserror::Error; + +// TODO: +// - random type -> ~random inhabitant of the type +// - random typed program? 
+ +/// Errors thrown by Elems::pop +#[derive(Clone, Debug, Error)] +pub enum ElemsPopError { + /// "Elems::pop singleton: tried to pop an Elem that was not found:\nelem_symbol:\n{elem_symbol:?}\n\n{error}" + #[error("Elems::pop singleton: tried to pop an Elem that was not found:\nelem_symbol:\n{elem_symbol:?}\n\n{error}")] + PopSingleton { + /// Expected type set + elem_symbol: EnumSet, + /// Extended StackError + error: StackError, + }, + + /// "Elems::pop: tried to pop a set of Elem's that were not found:\n{hd_error}\n\n{tl_errors}" + #[error("Elems::pop: tried to pop a set of Elem's that were not found:\n{hd_error}\n\n{tl_errors}")] + Pop { + /// Self::Hd pop error + hd_error: Arc, + /// Self::Tl pop error + tl_errors: Arc, + }, + + /// "Elems::pop: generic_array internal error\n\nelem_set:\n{elem_set:?}\n\nvec:\n{vec:?}\n\nsize:\n{size}" + // TODO: add detail + #[error("Elems::pop: generic_array internal error\n\nelem_set:\n{elem_set:?}\n\nvec:\n{vec:?}\n\nsize:\n{size}")] + GenericArray { + /// Expected type set + elem_set: EnumSet, + /// Found Elem's + vec: Vec, + /// Expected size + size: usize, + }, + + /// "IsList::pop (Cons, Hd): tried to pop a set of Elem's that were not found:\nstack_type:\n{stack_type}\n\nelem_set:\n{elem_set}\n\nstack_type:\n{stack_type_of}\n\nerror:\n{error}" + #[error("IsList::pop (Cons, Hd): tried to pop a set of Elem's that were not found:\nstack_type:\n{stack_type}\n\nelem_set:\n{elem_set}\n\nstack_type:\n{stack_type_of}\n\nerror:\n{error}")] + IsListHd { + /// Stack found + stack_type: StackType, + /// Expected type + elem_set: ElemType, + /// Stack type found + stack_type_of: StackType, + /// Extended error + error: Arc, + }, + + /// "IsList::pop (Cons, Tl): tried to pop a set of Elem's that were not found:\nstack_type:\n{stack_type}\n\nstack_type_of:\n{stack_type_of}\n\nerror:\n{error}" + #[error("IsList::pop (Cons, Tl): tried to pop a set of Elem's that were not 
found:\nstack_type:\n{stack_type}\n\nstack_type_of:\n{stack_type_of}\n\nerror:\n{error}")] + IsListTl { + /// Stack found + stack_type: StackType, + /// Expected type + stack_type_of: StackType, + /// Extended error + error: Arc, + }, + + /// "Instr::run: ElemTypeError:\n{0}" + #[error("Instr::run: ElemTypeError:\n{0}")] + RestackError(RestackError), + + /// "Elems::elem_type (Or): Set includes repeated type:\n{0}" + #[error("Elems::elem_type (Or): Set includes repeated type:\n{0}")] + ElemTypeError(ElemTypeError), + + /// "::type_of(): ContextError when adding Tl type: {0:?}" + #[error("::type_of(): ContextError when adding Tl type: {0:?}")] + ReturnOrTl(Arc), + + /// "::type_of(): ContextError when adding type:\n{0}" + #[error("::type_of(): ContextError when adding type:\n{0}")] + ReturnOrContextError(ContextError), +} + +/// A set of Elem's with multiplicities, given by Self::N +pub trait Elems: Clone + Debug + IntoIterator { + /// Head Elem + type Hd: AnElem; + + /// Multiplicity of the head Elem + type N: ArrayLength; + + /// Tail Elems, or Nil + type Tl: Elems; + + // fn left(s: PhantomData, x: GenericArray) -> Self; + // fn right(s: PhantomData, x: Self::Tl) -> Self; + + /// Unpack Self given handlers for Self::Hd and Self::Tl + fn or) -> T, G: Fn(&Self::Tl) -> T>(&self, f: F, g: G) -> T; + + /// Pop Self from a mutable Stack + fn pop(_x: PhantomData, stack: &mut Stack) -> Result + where + Self: Sized; + + /// Convert to an ElemType + fn elem_type(t: PhantomData) -> Result; +} + diff --git a/src/elems_all.rs b/src/elems_all.rs new file mode 100644 index 0000000..8cc0714 --- /dev/null +++ b/src/elems_all.rs @@ -0,0 +1,67 @@ +use crate::elem::Elem; +use crate::elems_singleton::Singleton; +use crate::elems_or::Or; + +use std::fmt::Debug; + +use serde_json::{Map, Number, Value}; +use generic_array::functional::FunctionalSequence; +use generic_array::{GenericArray, ArrayLength}; + + +// TODO: AnElem: &self -> AllElems +/// All possible Elem types, encoded using 
Or and Singleton. +pub type AllElems = + Or<(), N, + Or, N, + Or, N, + Or, N, + Singleton>>>>>>>; + +impl AllElems +where + N: Debug + + ArrayLength<()> + + ArrayLength + + ArrayLength + + ArrayLength> + + ArrayLength + + ArrayLength> + + ArrayLength> + + ArrayLength + + ArrayLength, +{ + /// Untype AllElems to Elem + pub fn untyped(&self) -> GenericArray { + match self { + Or::Left(array) => { + array.map(|_x| Elem::Unit) + }, + Or::Right(Or::Left(array)) => { + array.map(|&x| Elem::Bool(x)) + }, + Or::Right(Or::Right(Or::Left(array))) => { + array.map(|x| Elem::Number(x.clone())) + }, + Or::Right(Or::Right(Or::Right(Or::Left(array)))) => { + array.map(|x| Elem::Bytes(x.clone())) + }, + Or::Right(Or::Right(Or::Right(Or::Right(Or::Left(array))))) => { + array.map(|x| Elem::String(x.clone())) + }, + Or::Right(Or::Right(Or::Right(Or::Right(Or::Right(Or::Left(array)))))) => { + array.map(|x| Elem::Array(x.clone())) + }, + Or::Right(Or::Right(Or::Right(Or::Right(Or::Right(Or::Right(Or::Left(array))))))) => { + array.map(|x| Elem::Object(x.clone())) + }, + Or::Right(Or::Right(Or::Right(Or::Right(Or::Right(Or::Right(Or::Right(Singleton { array }))))))) => { + array.map(|x| Elem::Json(x.clone())) + }, + } + } +} + diff --git a/src/elems_input.rs b/src/elems_input.rs new file mode 100644 index 0000000..0bea87a --- /dev/null +++ b/src/elems_input.rs @@ -0,0 +1,25 @@ +use crate::an_elem::AnElem; +use crate::elems_singleton::Singleton; +use crate::elems_or::Or; +use crate::elems::Elems; + +use std::fmt::Debug; + +use generic_array::ArrayLength; + +/// Input Elems +pub trait IElems: Elems {} + +impl IElems for Singleton +where + T: AnElem, + N: ArrayLength + Debug, +{} + +impl IElems for Or +where + T: AnElem, + N: ArrayLength + Debug, + U: IElems, +{} + diff --git a/src/elems_input_output.rs b/src/elems_input_output.rs new file mode 100644 index 0000000..aae786a --- /dev/null +++ b/src/elems_input_output.rs @@ -0,0 +1,28 @@ +use crate::elem::Elem; +use 
crate::an_elem_return::Return; +use crate::types::Type; +use crate::elems::{Elems, ElemsPopError}; + +use std::marker::PhantomData; + +use generic_array::GenericArray; + +/// A set of optionally-returned AnElem's with multiplicities +pub trait IOElems: Elems { + /// Unpack either of the Left/Right options: + /// - The Left case is a GenericArray of Self::N copies of Self::Hd, + /// returning Self::Hd + /// - The Right case is Self::Tl + fn or_return(&self, f: F, g: G) -> T + where + F: Fn(&GenericArray, &Return) -> T, + G: Fn(&Self::Tl) -> T; + + // TODO: rename to 'returned' to match Return + /// The returned Elem, if any has been returned + fn returning(&self) -> Option; + + /// The type of the set of optionally-returned AnElem's with multiplicities + fn type_of(t: PhantomData) -> Result; +} + diff --git a/src/elems_input_output_or.rs b/src/elems_input_output_or.rs new file mode 100644 index 0000000..345d877 --- /dev/null +++ b/src/elems_input_output_or.rs @@ -0,0 +1,142 @@ +use crate::elem::Elem; +use crate::elem_type::ElemType; +use crate::an_elem::AnElem; +use crate::stack::Stack; +use crate::types::Type; +use crate::elems_or::Or; +use crate::elems::{Elems, ElemsPopError}; +use crate::elems_input_output::IOElems; +use crate::an_elem_return::Return; + +use std::marker::PhantomData; +use std::fmt::Debug; +use std::sync::Arc; + +use generic_array::{GenericArray, ArrayLength}; + +/// A version of Or where the Left side is equivalent to ReturnSingleton, +/// i.e. Or, U> with appropriate trait constraints to +/// ensure that exactly one typed value is returned. +#[derive(Clone, Debug)] +pub enum ReturnOr +where + T: AnElem, + N: ArrayLength + Debug, + U: Elems, +{ + /// ReturnSingleton + Left { + /// N copies of AnElem type T + array: GenericArray, + + /// Returning a single copy of AnElem type T + returning: Return, + }, + + /// The Right or continuation variant. See Or. 
+ Right(U), +} + +impl IntoIterator for ReturnOr +where + T: AnElem, + N: ArrayLength + Debug, + U: Elems, +{ + type Item = Elem; + type IntoIter = as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + match self { + Self::Left { array, .. } => Or::::Left(array).into_iter(), + Self::Right(xs) => Or::Right(xs).into_iter(), + } + } +} + +impl Elems for ReturnOr +where + T: AnElem, + N: ArrayLength + Debug, + U: Elems, +{ + type Hd = T; + type N = N; + type Tl = U; + + fn or) -> V, G: Fn(&Self::Tl) -> V>(&self, f: F, g: G) -> V { + match self { + Self::Left { array, .. } => f(array), + Self::Right(x) => g(x), + } + } + + fn pop(_x: PhantomData, stack: &mut Stack) -> Result + where + Self: Sized, + { + as Elems>::pop(PhantomData, stack) + .map(|x| { + match x { + Or::Left(array) => Self::Left { + array: array, + returning: Return::new(), + }, + Or::Right(y) => Self::Right(y), + } + }) + } + + fn elem_type(_t: PhantomData) -> Result { + Elems::elem_type(PhantomData::>) + } +} + +impl IOElems for ReturnOr +where + T: AnElem, + N: ArrayLength + Debug, + U: IOElems +{ + fn or_return(&self, f: F, g: G) -> V + where + F: Fn(&GenericArray, &Return) -> V, + G: Fn(&Self::Tl) -> V, + { + match self { + Self::Left { array, returning } => { + f(array, returning) + }, + Self::Right(x) => g(x), + } + } + + fn returning(&self) -> Option { + match self { + Self::Left { returning, .. 
} => { + returning.returned().map(|x| x.to_elem()) + }, + Self::Right(x) => x.returning(), + } + } + + // TODO: add error info + fn type_of(_t: PhantomData) -> Result { + let mut type_tl = IOElems::type_of(PhantomData::) + .map_err(|e| ElemsPopError::ReturnOrTl(Arc::new(e)))?; + let last_type_id = type_tl.context.max_type_id() + .map_err(|e| ElemsPopError::ReturnOrContextError(e))?; + let next_type_id = type_tl.context.push(ElemType { + type_set: AnElem::elem_symbol(PhantomData::), + info: vec![], + }); + type_tl.context.unify(last_type_id, next_type_id) + .map_err(|e| ElemsPopError::ReturnOrContextError(e))?; + Ok(type_tl) + } +} + + + + + diff --git a/src/elems_input_output_singleton.rs b/src/elems_input_output_singleton.rs new file mode 100644 index 0000000..79eb36c --- /dev/null +++ b/src/elems_input_output_singleton.rs @@ -0,0 +1,107 @@ +use crate::elem::Elem; +use crate::elem_type::ElemType; +use crate::an_elem::AnElem; +use crate::stack::Stack; +use crate::types::context::Context; +use crate::types::Type; +use crate::elems_singleton::Singleton; +use crate::elems::{Elems, ElemsPopError}; +use crate::elems_input_output::IOElems; +use crate::an_elem_return::Return; + +use std::marker::PhantomData; +use std::fmt::Debug; + +use generic_array::{GenericArray, ArrayLength}; +use typenum::marker_traits::Unsigned; + +/// A Singleton Return-ing AnElem of the same type, but always with a +/// multiplicity of one. 
+#[derive(Clone, Debug)] +pub struct ReturnSingleton +where + T: AnElem, + N: ArrayLength + Debug, +{ + /// Wrapped Singleton + pub singleton: Singleton, + + /// Typed return slot + pub returning: Return, +} + +impl IntoIterator for ReturnSingleton +where + T: AnElem, + N: ArrayLength + Debug, +{ + type Item = Elem; + type IntoIter = as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.singleton.into_iter() + } +} + +impl Elems for ReturnSingleton +where + T: AnElem, + N: ArrayLength + Debug, +{ + type Hd = T; + type N = N; + type Tl = Singleton; + + // fn left(_s: PhantomData, x: GenericArray) -> Self { Elems::left(PhantomData::>, x) } + // fn right(_s: PhantomData, x: Self::Tl) -> Self { Elems::left(PhantomData::>, x) } + fn or) -> U, G: Fn(&Self::Tl) -> U>(&self, f: F, g: G) -> U { + self.singleton.or(f, g) + } + + fn pop(_x: PhantomData, stack: &mut Stack) -> Result + where + Self: Sized, + { + Ok(ReturnSingleton { + singleton: Elems::pop(PhantomData::>, stack)?, + returning: Return::new(), + }) + } + + fn elem_type(_t: PhantomData) -> Result { + Elems::elem_type(PhantomData::>) + } +} + +impl IOElems for ReturnSingleton +where + T: AnElem, + N: ArrayLength + Debug, +{ + fn or_return(&self, f: F, _g: G) -> U + where + F: Fn(&GenericArray, &Return) -> U, + G: Fn(&Self::Tl) -> U, + { + f(&self.singleton.array, &self.returning) + } + + fn returning(&self) -> Option { + self.returning.returned().map(|x| x.to_elem()) + } + + fn type_of(_t: PhantomData) -> Result { + let num_inputs = ::to_usize(); + let mut context = Context::new(); + let type_id = context.push(ElemType { + type_set: AnElem::elem_symbol(PhantomData::), + info: vec![], + }); + Ok(Type { + context: context, + i_type: (1..num_inputs).into_iter().map(|_| type_id).collect(), + o_type: vec![type_id], + }) + } +} + diff --git a/src/elems_list.rs b/src/elems_list.rs new file mode 100644 index 0000000..0d1ccba --- /dev/null +++ b/src/elems_list.rs @@ -0,0 +1,73 @@ +use 
crate::elem::Elem; +use crate::elem_type::StackType; +use crate::stack::Stack; +use crate::elems::{Elems, ElemsPopError}; + +use std::marker::PhantomData; +use std::fmt::Debug; +use std::sync::Arc; + +/// A non-empty ordered list of Elems +pub trait IsList: Clone + Debug + IntoIterator { + /// The Hd Elems, or unit if empty + type Hd: Elems; + + /// The rest of the list, or Nil + type Tl: IsList; + + /// Return Self if empty + fn empty_list() -> Option where Self: Sized; + + /// Cons Self::Hd with Self::Tl + fn cons_list(x: Self::Hd, xs: Self::Tl) -> Self; + + /// Is it empty? + fn is_empty(&self) -> bool { + ::empty_list().is_some() + } + + /// Self::Hd can always be returned + fn hd(self) -> Self::Hd; + + /// Self::Tl can always be returned + fn tl(self) -> Self::Tl; + + // fn cons(self, x: T) -> Cons + // where + // Self: Sized, + // { + // Cons { + // hd: x, + // tl: self, + // } + // } + + /// The StackType of this list of Elems + fn stack_type(t: PhantomData) -> Result; + + /// Pop this type from an untyped Stack + fn pop(_x: PhantomData, stack: &mut Stack) -> Result + where + Self: Sized, + { + match ::empty_list() { + Some(x) => Ok(x), + None => { + let original_stack = stack.clone(); + let x = ::pop(PhantomData, stack).or_else(|e| Err(ElemsPopError::IsListHd { + stack_type: IsList::stack_type(PhantomData::)?, + elem_set: Elems::elem_type(PhantomData::)?, + stack_type_of: original_stack.clone().type_of(), + error: Arc::new(e), + }))?; + let xs = ::pop(PhantomData, stack).or_else(|e| Err(ElemsPopError::IsListTl { + stack_type: IsList::stack_type(PhantomData::)?, + stack_type_of: original_stack.clone().type_of(), + error: Arc::new(e), + }))?; + Ok(::cons_list(x, xs)) + } + } + } +} + diff --git a/src/elems_list_cons.rs b/src/elems_list_cons.rs new file mode 100644 index 0000000..d0777f3 --- /dev/null +++ b/src/elems_list_cons.rs @@ -0,0 +1,94 @@ +use crate::elem::Elem; +use crate::elem_type::StackType; +use crate::elems::{Elems, ElemsPopError}; +use 
crate::elems_list::IsList; + +use std::marker::PhantomData; +use std::fmt::{self, Debug, Formatter}; + +use typenum::marker_traits::Unsigned; + +/// A non-empty list of Elems, where the first element is explicitly provided +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Cons { + /// The head of the list, which must be Elems + pub hd: T, + + /// The tail of the list, which IsList + pub tl: U, +} + +/// IntoIterator applied to Cons::hd and Cons::tl +pub struct IterCons { + hd: ::IntoIter, + tl: ::IntoIter, +} + +impl Debug for IterCons +where + T: Elems, + U: IsList, + ::IntoIter: Debug, + ::IntoIter: Debug, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { + write!(f, "Cons {{\n hd: {:?},\n tl: {:?}\n}}", self.hd, self.tl) + } +} + +impl IntoIterator for Cons { + type Item = Elem; + type IntoIter = IterCons; + + fn into_iter(self) -> Self::IntoIter { + IterCons { + hd: self.hd.into_iter(), + tl: self.tl.into_iter(), + } + } +} + +impl Iterator for IterCons { + type Item = Elem; + + fn next(&mut self) -> Option { + self.hd.next().or_else(|| self.tl.next()) + } +} + +impl IsList for Cons { + type Hd = T; + type Tl = U; + + fn empty_list() -> Option where Self: Sized { + None + } + + fn cons_list(x: Self::Hd, xs: Self::Tl) -> Self { + Cons { + hd: x, + tl: xs, + } + } + + // fn is_empty(&self) -> bool { + // false + // } + + fn hd(self) -> Self::Hd { + self.hd + } + + fn tl(self) -> Self::Tl { + self.tl + } + + fn stack_type(_t: PhantomData) -> Result { + let elem_type_hd = Elems::elem_type(PhantomData::)?; + let elem_type_hd_count = <::N as Unsigned>::to_usize(); + let mut stack_type_tl = IsList::stack_type(PhantomData::)?; + stack_type_tl.push_n(elem_type_hd, elem_type_hd_count); + Ok(stack_type_tl) + } +} + diff --git a/src/elems_list_input.rs b/src/elems_list_input.rs new file mode 100644 index 0000000..28ed825 --- /dev/null +++ b/src/elems_list_input.rs @@ -0,0 +1,19 @@ +use crate::elems_input::IElems; +use crate::elems_list::IsList; +use 
crate::elems_list_nil::Nil; +use crate::elems_list_cons::Cons; + +/// IsList that defines inputs, but no outputs. See IOList for more info +pub trait IList: IsList { +} + +impl IList for Nil { +} + +impl IList for Cons +where + T: IElems, + U: IList, +{ +} + diff --git a/src/elems_list_input_output.rs b/src/elems_list_input_output.rs new file mode 100644 index 0000000..03f9069 --- /dev/null +++ b/src/elems_list_input_output.rs @@ -0,0 +1,47 @@ +use crate::elem::Elem; +use crate::types::Type; +use crate::elems::{Elems, ElemsPopError}; +use crate::elems_input::IElems; +use crate::elems_input_output::IOElems; +use crate::elems_list::IsList; +use crate::elems_list_cons::Cons; + +use std::marker::PhantomData; + +use typenum::marker_traits::Unsigned; + +/// Input-output type of an instruction +pub trait IOList: IsList { + /// Returned IOElems + type Return: IOElems; + + // TODO: rename to returned a la Return. + /// Returned value, if set + fn returning(&self) -> Option; + + /// IOList's define a complete input/output Type, with exacly one return value + fn type_of(t: PhantomData) -> Result; +} + +impl IOList for Cons +where + T: IElems, + U: IOList, +{ + type Return = U::Return; + + fn returning(&self) -> Option { + self.tl.returning() + } + + // TODO: test + fn type_of(_t: PhantomData) -> Result { + let num_elem_type_hd = <::N as Unsigned>::to_usize(); + let elem_type_hd = Elems::elem_type(PhantomData::)?; + let mut type_tl = IOList::type_of(PhantomData::)?; + + type_tl.prepend_inputs(num_elem_type_hd, elem_type_hd); + Ok(type_tl) + } +} + diff --git a/src/elems_list_input_output_cons.rs b/src/elems_list_input_output_cons.rs new file mode 100644 index 0000000..8c0a224 --- /dev/null +++ b/src/elems_list_input_output_cons.rs @@ -0,0 +1,94 @@ +use crate::elem::Elem; +use crate::elem_type::StackType; +use crate::types::Type; +use crate::elems::ElemsPopError; +use crate::elems_input_output::IOElems; +use crate::elems_list::IsList; +use crate::elems_list_cons::{Cons, 
IterCons}; +use crate::elems_list_input::IList; +use crate::elems_list_input_output::IOList; + +use std::marker::PhantomData; +use std::fmt::Debug; + +/// Cons whose hd type is restricted to IOElems and whose tl type is restricted to IList +/// +/// This ensures that there can only be one ConsOut per IOList. +/// By restricting instances of IOList, it must contain exactly one ConsOut. +#[derive(Clone, Debug)] +pub struct ConsOut +where + T: IOElems, + U: IList, +{ + cons: Cons, +} + +impl IntoIterator for ConsOut { + type Item = Elem; + type IntoIter = IterCons; + + fn into_iter(self) -> Self::IntoIter { + self.cons.into_iter() + } +} + +impl IsList for ConsOut +where + T: IOElems, + U: IList +{ + type Hd = T; + type Tl = U; + + fn empty_list() -> Option where Self: Sized { + None + } + + fn cons_list(x: Self::Hd, xs: Self::Tl) -> Self { + ConsOut { + cons: Cons { + hd: x, + tl: xs, + }, + } + } + + fn is_empty(&self) -> bool { + self.cons.is_empty() + } + + fn hd(self) -> Self::Hd { + self.cons.hd() + } + + fn tl(self) -> Self::Tl { + self.cons.tl() + } + + fn stack_type(_t: PhantomData) -> Result { + IsList::stack_type(PhantomData::>) + } +} + +impl IOList for ConsOut +where + T: IOElems, + U: IList, +{ + type Return = T; + + fn returning(&self) -> Option { + self.cons.hd.returning() + } + + // TODO: add info to errors + fn type_of(_t: PhantomData) -> Result { + // let num_elem_type_hd = <::N as Unsigned>::to_usize(); + let mut type_hd = IOElems::type_of(PhantomData::)?; + let elem_type_tl = IsList::stack_type(PhantomData::)?; + type_hd.append_inputs(elem_type_tl); + Ok(type_hd) + } +} + diff --git a/src/elems_list_nil.rs b/src/elems_list_nil.rs new file mode 100644 index 0000000..30b7013 --- /dev/null +++ b/src/elems_list_nil.rs @@ -0,0 +1,58 @@ +use crate::elem::Elem; +use crate::elem_type::StackType; +use crate::elems_list::IsList; +use crate::elems_singleton::Singleton; +use crate::elems::ElemsPopError; + +use std::marker::PhantomData; +use std::fmt::Debug; + 
+use generic_array::sequence::GenericSequence; +use generic_array::typenum::U0; +use generic_array::GenericArray; + +/// An empty IsList, i.e. an empty list of Elems +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct Nil {} + +impl Iterator for Nil { + type Item = Elem; + + fn next(&mut self) -> Option { + None + } +} + +impl IsList for Nil { + type Hd = Singleton<(), U0>; + type Tl = Nil; + + fn empty_list() -> Option where Self: Sized { + Some(Self {}) + } + + fn cons_list(_x: Self::Hd, _xs: Self::Tl) -> Self { + Self {} + } + + // fn is_empty(&self) -> bool { + // true + // } + + fn hd(self) -> Self::Hd { + Singleton { + array: GenericArray::generate(|_| ()), + } + } + + fn tl(self) -> Self::Tl { + Self {} + } + + fn stack_type(_t: PhantomData) -> Result { + Ok(StackType { + types: vec![], + }) + } +} + diff --git a/src/elems_or.rs b/src/elems_or.rs new file mode 100644 index 0000000..38c2d56 --- /dev/null +++ b/src/elems_or.rs @@ -0,0 +1,139 @@ +use crate::stack::Stack; +use crate::elem::Elem; +use crate::elem_type::ElemType; +use crate::an_elem::AnElem; +use crate::elems_singleton::Singleton; +use crate::elems::{Elems, ElemsPopError}; + +use std::marker::PhantomData; +use std::fmt::{self, Debug, Formatter}; +use std::sync::Arc; + +use generic_array::{GenericArray, ArrayLength}; + +/// Either AnElem with type T an multiplicity N or Elems U, i.e. +/// Or is equivalent to Result, U> with constraints +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum Or +where + T: AnElem, + N: ArrayLength + Debug, + U: Elems, +{ + /// AnElem with type T and multiplicity N. 
Equivalent to Singleton unwrapped + Left(GenericArray), + /// Other Elems + Right(U), +} + +pub enum IterOr +where + T: AnElem, + N: ArrayLength + Debug, + U: Elems, +{ + Left( as IntoIterator>::IntoIter), + Right(::IntoIter), +} + +impl Debug for IterOr +where + T: AnElem, + N: ArrayLength + Debug, + U: Elems, + ::IntoIter: Debug, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { + match self { + Self::Left(x) => write!(f, "IterOr::Left({:?})", x), + Self::Right(x) => write!(f, "IterOr::Right({:?})", x), + } + } +} + +impl Iterator for IterOr +where + T: AnElem, + N: ArrayLength + Debug, + U: Elems, +{ + type Item = Elem; + + fn next(&mut self) -> Option { + match self { + Self::Left(x) => x.next(), + Self::Right(x) => x.next(), + } + } +} + +impl IntoIterator for Or +where + T: AnElem, + N: ArrayLength + Debug, + U: Elems, +{ + type Item = Elem; + type IntoIter = IterOr; + + fn into_iter(self) -> Self::IntoIter { + match self { + Self::Left(array) => IterOr::Left( + Singleton { + array: array, + }.into_iter() + ), + Self::Right(xs) => IterOr::Right(xs.into_iter()), + } + } +} + +impl Elems for Or +where + T: AnElem, + N: ArrayLength + Debug, + U: Elems, +{ + type Hd = T; + type N = N; + type Tl = U; + + // fn left(_s: PhantomData, x: GenericArray) -> Self { Self::Left(x) } + // fn right(_s: PhantomData, x: Self::Tl) -> Self { Self::Right(x) } + fn or) -> V, G: Fn(&Self::Tl) -> V>(&self, f: F, g: G) -> V { + match self { + Self::Left(x) => f(x), + Self::Right(x) => g(x), + } + } + + fn pop(_x: PhantomData, stack: &mut Stack) -> Result + where + Self: Sized, + { + match as Elems>::pop(PhantomData, stack) { + Ok(Singleton { array }) => Ok(Self::Left(array)), + Err(hd_error) => { + Elems::pop(PhantomData::, stack) + .map(|x| Self::Right(x)) + .map_err(|tl_errors| { + ElemsPopError::Pop { + hd_error: Arc::new(hd_error), + tl_errors: Arc::new(tl_errors), + } + }) + }, + } + } + + // TODO: add info + fn elem_type(_t: PhantomData) -> Result { + let 
elem_type_hd = ElemType { + type_set: AnElem::elem_symbol(PhantomData::), + info: vec![], + }; + let elem_type_tl = Elems::elem_type(PhantomData::)?; + Ok(elem_type_hd.union(elem_type_tl)) + } +} + diff --git a/src/elems_singleton.rs b/src/elems_singleton.rs new file mode 100644 index 0000000..3f8216d --- /dev/null +++ b/src/elems_singleton.rs @@ -0,0 +1,85 @@ +use crate::stack::Stack; +use crate::elem::Elem; +use crate::elem_type::ElemType; +use crate::an_elem::AnElem; +use crate::elems::{Elems, ElemsPopError}; + +use std::fmt::Debug; +use std::marker::PhantomData; + +use generic_array::{GenericArray, GenericArrayIter, ArrayLength}; +use typenum::marker_traits::Unsigned; + +// TODO: rename +/// AnElem with type T and multiplicity N +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Singleton +where + T: AnElem, + N: ArrayLength + Debug, +{ + /// An array of AnElem with multiplicity N + pub array: GenericArray, +} + +impl IntoIterator for Singleton +where + T: AnElem, + N: ArrayLength + Debug, +{ + type Item = Elem; + type IntoIter = std::iter::Map, fn(T) -> Elem>; + + fn into_iter(self) -> Self::IntoIter { + self.array.into_iter().map(AnElem::to_elem) + } +} + +impl Elems for Singleton +where + T: AnElem, + N: ArrayLength + Debug, +{ + type Hd = T; + type N = N; + type Tl = Singleton; + + // fn left(_s: PhantomData, x: GenericArray) -> Self { Singleton { t: x, } } + // fn right(_s: PhantomData, x: Self::Tl) -> Self { x } + fn or) -> U, G: Fn(&Self::Tl) -> U>(&self, f: F, _g: G) -> U { + f(&self.array) + } + + fn pop(_x: PhantomData, stack: &mut Stack) -> Result + where + Self: Sized, + { + let vec = (0..::to_usize()).map(|_array_ix| { + stack + .pop_elem(PhantomData::) + .map_err(|e| ElemsPopError::PopSingleton { + elem_symbol: AnElem::elem_symbol(PhantomData::), + error: e, + }) + }).collect::, ElemsPopError>>()?; + let array = GenericArray::from_exact_iter(vec.clone()).ok_or_else(|| { + ElemsPopError::GenericArray { + elem_set: 
AnElem::elem_symbol(PhantomData::), + vec: vec.into_iter().map(|x| x.to_elem()).collect(), + size: ::to_usize(), + } + })?; + Ok(Singleton { + array: array, + }) + } + + // TODO: add info + fn elem_type(_t: PhantomData) -> Result { + Ok(ElemType { + type_set: AnElem::elem_symbol(PhantomData::), + info: vec![], + }) + } +} + diff --git a/src/executor.rs b/src/executor.rs deleted file mode 100644 index 6072c4b..0000000 --- a/src/executor.rs +++ /dev/null @@ -1,64 +0,0 @@ -use crate::parse::{Elem, Instruction, Instructions}; - -use thiserror::Error; - -#[derive(Debug, Default)] -pub struct Executor { - stack: Vec, -} - -impl Executor { - pub fn consume(&mut self, expressions: Instructions) -> Result<(), ExecError> { - for expr in expressions { - match expr { - Instruction::Push(elem) => self.push(elem), - Instruction::FnAssertTrue => self.assert_true()?, - Instruction::FnCheckEqual => self.check_equal()?, - Instruction::FnHashSha256 => self.sha256()?, - } - } - Ok(()) - } - - fn assert_true(&mut self) -> Result<(), ExecError> { - match self.pop()? { - Elem::Bool(true) => Ok(()), - found => Err(ExecError::AssertTrueFailed(found)), - } - } - - fn check_equal(&mut self) -> Result<(), ExecError> { - let one = self.pop()?; - let other = self.pop()?; - self.push(Elem::Bool(one == other)); - Ok(()) - } - - fn sha256(&mut self) -> Result<(), ExecError> { - match self.pop()? 
{ - Elem::BytesN(bytes) => { - self.push(Elem::Bytes32(super::sha256(&bytes))); - Ok(()) - } - elem => Err(ExecError::HashUnsupportedType(elem.simple_type())), - } - } - - fn push(&mut self, elem: Elem) { - self.stack.push(elem) - } - - fn pop(&mut self) -> Result { - self.stack.pop().ok_or_else(|| ExecError::EmptyStack) - } -} - -#[derive(Debug, Error)] -pub enum ExecError { - #[error("expected Elem::Bool(true), found {0:?}")] - AssertTrueFailed(Elem), - #[error("tried to pop from an empty stack")] - EmptyStack, - #[error("attempted to hash an elem of an unsupported type ({0})")] - HashUnsupportedType(&'static str), -} diff --git a/src/json_template.rs b/src/json_template.rs new file mode 100644 index 0000000..4d944e2 --- /dev/null +++ b/src/json_template.rs @@ -0,0 +1,219 @@ +use std::fmt::{self, Formatter}; +use std::sync::Arc; +use std::marker::PhantomData; + +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use serde::de::{Visitor, MapAccess}; + +use indexmap::IndexMap; +use serde_json::{Map, Number, Value}; +use thiserror::Error; + +/// Map defined to be convenient to Serialize and Deserialize +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct TMap { + map: IndexMap, +} + +impl TMap { + /// IndexMap::new + pub fn new() -> Self { + TMap { + map: IndexMap::new(), + } + } + + /// IndexMap::insert + pub fn insert(&mut self, key: String, value: T) -> Option { + self.map.insert(key, value) + } +} + +impl Serialize for TMap { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + self.map.clone() + .into_iter() + .map(|(x, y)| Ok((x, y.to_json()?))) + .collect::, TValueError>>() + .map_err(|e| serde::ser::Error::custom(format!("Serialize for TMap:\n{:?}", e)))? 
+ .serialize(serializer) + } +} + +struct TMapVisitor { + marker: PhantomData TMap> +} + +impl TMapVisitor { + fn new() -> Self { + TMapVisitor { + marker: PhantomData + } + } +} + +impl<'de, T> Visitor<'de> for TMapVisitor +where + T: Deserialize<'de>, +{ + type Value = TMap; + + // Format a message stating what data this Visitor expects to receive. + fn expecting(&self, formatter: &mut Formatter<'_>) -> fmt::Result { + // TODO: extend description + formatter.write_str("TMap") + } + + fn visit_map(self, mut access: M) -> Result + where + M: MapAccess<'de>, + { + let mut map = IndexMap::with_capacity(access.size_hint().unwrap_or(0)); + + while let Some((key, value)) = access.next_entry()? { + map.insert(key, value); + } + + Ok(TMap { + map: map, + }) + } +} + +impl<'de, T> Deserialize<'de> for TMap +where + T: Deserialize<'de>, +{ + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_map(TMapVisitor::new()) + } +} + +/// serde_json::Value with Var's +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum TValue { + /// serde_json::Null + Null, + + /// serde_json::Bool + Bool(bool), + + /// serde_json::Number + Number(Number), + + /// serde_json::String + String(String), + + /// serde_json::Array with Var's + Array(Vec), + + /// serde_json::Object with Var's + Object(TMap), + + /// Named variable. 
See TValue::run for more detail + Var(String), +} + +/// An error encountered during the execution of TValue::run +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct TValueRunError { + variable: String, + value: Vec, + variables: Map, +} + +impl TValue { + /// Convert from JSON, ignoring Var's + /// + /// Use Deserialize to convert including Var's + pub fn from_json(json: Value) -> Self { + match json { + Value::Null => Self::Null, + Value::Bool(x) => Self::Bool(x), + Value::Number(x) => Self::Number(x), + Value::String(x) => Self::String(x), + Value::Array(x) => Self::Array(x.into_iter().map(|x| TValue::from_json(x)).collect()), + Value::Object(x) => Self::Object(TMap { + map: x.into_iter().map(|(x, y)| (x, TValue::from_json(y))).collect() + }), + } + } + + /// Convert to JSON using derived Serialize instance + pub fn to_json(&self) -> Result { + serde_json::to_value(self) + .map_err(|e| TValueError::SerdeJsonError(Arc::new(e))) + } + + /// Resolve all of the (Var)'s using the given variables. + /// + /// For example, if the Map includes the association ("foo", "bar"), + /// all occurences of Var("foo") will be replaced with "bar". 
+ pub fn run(self, variables: Map) -> Result { + let self_copy = self.clone(); + match self { + Self::Null => Ok(Value::Null), + Self::Bool(x) => Ok(Value::Bool(x)), + Self::Number(x) => Ok(Value::Number(x)), + Self::String(x) => Ok(Value::String(x)), + Self::Array(x) => Ok(Value::Array(x.into_iter().map(|y| y.run(variables.clone())).collect::, TValueRunError>>()?)), + Self::Object(x) => Ok(Value::Object(x.map.into_iter().map(|(y, z)| Ok((y, z.run(variables.clone())?))).collect::, TValueRunError>>()?)), + Self::Var(x) => { + variables.get(&x) + .map(|y| y.clone()) + .ok_or_else(|| TValueRunError { + variable: x, + value: vec![self_copy], + variables: variables, + }) + }, + } + } +} + +#[derive(Clone, Debug, Error)] +pub enum TValueError { + #[error("TValue::to_json:\n{0}")] + SerdeJsonError(Arc), +} + +/// A template that inclues an associated set of variables +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct Template { + /// Set of variables to resolve on the TValue + variables: Map, + + /// Template value + template: TValue, +} + +impl Template { + /// New template with an empty set of variables + pub fn new(template: TValue) -> Self { + Self { + variables: Map::new(), + template: template, + } + } + + /// Set the given variable name to the given Value + pub fn set(&mut self, name: String, value: Value) -> () { + self.variables.insert(name, value); + } + + /// Deserialize the Template from JSON and instantiate an empty set of variables + pub fn from_json(json: Value) -> Self { + Self::new(TValue::from_json(json)) + } + + /// Run the TValue given the provided variables + pub fn run(self) -> Result { + self.template.run(self.variables) + } +} diff --git a/src/lib.rs b/src/lib.rs index 34cc211..fa1ba47 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,93 +1,174 @@ + +//! Cryptoscript Rust Library +//! 
See cli for the command line interface + +#![warn(missing_docs, elided_lifetimes_in_paths, explicit_outlives_requirements, keyword_idents, missing_copy_implementations, missing_debug_implementations, non_ascii_idents, noop_method_call, single_use_lifetimes, trivial_casts, trivial_numeric_casts, unreachable_pub, unused_extern_crates, unused_import_braces, unused_lifetimes, unused_qualifications)] + +// #![warn(unused_crate_dependencies)] +// #![warn(unused_results)] + +#![deny(unsafe_code, unsafe_op_in_unsafe_fn)] + +mod restack; +pub use restack::{Restack, StackIx}; +mod arbitrary; +pub use arbitrary::{ArbitraryNumber, ArbitraryMap, ArbitraryValue}; +mod elem; +pub use elem::{Elem, ElemSymbol}; +mod elem_type; +pub use elem_type::{ElemType, StackType}; +mod an_elem; +pub use an_elem::{AnElem, AnElemError}; +mod an_elem_return; +pub use an_elem_return::Return; +mod location; +pub use location::{ArgumentIndex, LineNo}; +mod stack; +pub use stack::{Stack, StackError}; + +mod types; +pub use types::empty::Empty; +pub use types::context::{Context, ContextError}; +pub use types::type_id::TypeId; +pub use types::type_id::map::{TypeIdMap, TypeIdMapError}; +pub use types::{Type, TypeError}; + +mod elems; +pub use elems::{Elems, ElemsPopError}; +mod elems_singleton; +pub use elems_singleton::Singleton; +mod elems_or; +pub use elems_or::Or; +mod elems_all; +pub use elems_all::AllElems; +mod elems_input; +pub use elems_input::IElems; +mod elems_input_output; +pub use elems_input_output::IOElems; +mod elems_input_output_singleton; +pub use elems_input_output_singleton::ReturnSingleton; +mod elems_input_output_or; +pub use elems_input_output_or::ReturnOr; +mod elems_list; +pub use elems_list::IsList; +mod elems_list_nil; +pub use elems_list_nil::Nil; +mod elems_list_cons; +pub use elems_list_cons::{Cons, IterCons}; +mod elems_list_input; +pub use elems_list_input::IList; +mod elems_list_input_output; +pub use elems_list_input_output::IOList; +mod elems_list_input_output_cons; 
+pub use elems_list_input_output_cons::ConsOut; +mod json_template; +pub use json_template::{TMap, TValue, TValueRunError, Template}; +mod query; +pub use query::{QueryTemplate, QueryTemplates, Query, QueryType, QueryError}; +mod untyped_instruction; +pub use untyped_instruction::Instruction; +mod untyped_instructions; +pub use untyped_instructions::Instructions; +mod typed_instruction; +pub use typed_instruction::IsInstructionT; +mod typed_instructions; +pub use typed_instructions::{AssertTrue, Concat, Push, Lookup, UnpackJson, Index, CheckEq, BytesEq, StringEq, CheckLe, CheckLt, StringToBytes, ToJson, Slice, HashSha256}; +mod typed_instr; +pub use typed_instr::Instr; +mod typed_instrs; +pub use typed_instrs::Instrs; mod parse; -pub use parse::{parse, Instructions}; -mod executor; -pub use executor::Executor; - -use generic_array::{typenum::U32, GenericArray}; -use hex::encode as hex_encode; -use hex_literal::hex; -use sha2::{Digest, Sha256, Sha512}; -use sha3::{Digest as Sha3_Digest, Sha3_256}; - -/** - * Types: - * - UnsignedInteger - * - Integer - * - Float64 - * - Bytes(N) - * - Multibase - * - Multihash - * - Multiaddr - * - KeyTypes={Ed25519,Secp256k1,Secp256r1,Bls12_381} - * - PublicKey(KeyType) - * - PrivateKey(KeyType) - * - JWT - * - JWS - * - JWE - * - LDP - * - JSON - * - CBOR - * - * Functions - * - Sign :: Bytes(N) -> PrivateKey(KeyType) => Bytes(SignatureSize[KeyType]) - * - VerifySignature :: Bytes(N) -> Bytes(SignatureSize[KeyType]) -> PublicKey(KeyType) => Boolean - * - VerifyRecoveredSignature :: Bytes(N) -> Bytes(SignatureSize[KeyType]) => Boolean - * - HashSha3_256 :: Bytes(N) => Bytes(32) - * - Equal - * - AssertTrue - * - * Example - * push b"I am the walrus."; - * hash_sha256; - * push 0x475b03e74f7ee448273dbde5ab892746c7b23a2b4d050ccb7d9270b6fb152b72; - * check_equal; - * assert_true; - * - * Example - * setup { - * push b"I am the walrus."; - * } - * challenge { - * hash_sha256; - * push 
0x475b03e74f7ee448273dbde5ab892746c7b23a2b4d050ccb7d9270b6fb152b72; - * check_equal; - * assert_true; - * } - */ - -fn sha256(input: &Vec) -> GenericArray { - // create a Sha256 object - let mut hasher = Sha256::new(); +pub use parse::{parse, parse_json}; - // write input message - hasher.update(input); +mod rest_api; +pub use rest_api::Api; +mod cli; +pub use cli::Cli; - // read hash digest and consume hasher - let result = hasher.finalize(); - return result; -} +use sha2::{Digest, Sha256}; + +// /** +// * Types: +// * - UnsignedInteger +// * - Integer +// * - Float64 +// * - Bytes(N) +// * - Multibase +// * - Multihash +// * - Multiaddr +// * - KeyTypes={Ed25519,Secp256k1,Secp256r1,Bls12_381} +// * - PublicKey(KeyType) +// * - PrivateKey(KeyType) +// * - JWT +// * - JWS +// * - JWE +// * - LDP +// * - JSON +// * - CBOR +// * +// * Functions +// * - Sign :: Bytes(N) -> PrivateKey(KeyType) => Bytes(SignatureSize[KeyType]) +// * - VerifySignature :: Bytes(N) -> Bytes(SignatureSize[KeyType]) -> PublicKey(KeyType) => Boolean +// * - VerifyRecoveredSignature :: Bytes(N) -> Bytes(SignatureSize[KeyType]) => Boolean +// * - HashSha3_256 :: Bytes(N) => Bytes(32) +// * - Equal +// * - AssertTrue +// * +// * Example +// * push b"I am the walrus."; +// * hash_sha256; +// * push 0x475b03e74f7ee448273dbde5ab892746c7b23a2b4d050ccb7d9270b6fb152b72; +// * check_equal; +// * assert_true; +// * +// * Example +// * setup { +// * push b"I am the walrus."; +// * } +// * challenge { +// * hash_sha256; +// * push 0x475b03e74f7ee448273dbde5ab892746c7b23a2b4d050ccb7d9270b6fb152b72; +// * check_equal; +// * assert_true; +// * } +// */ -fn sha3_256(input: &Vec) -> GenericArray { +fn sha256(input: &Vec) -> Vec { // create a Sha256 object - let mut hasher = Sha3_256::new(); + let mut hasher = Sha256::new(); // write input message hasher.update(input); // read hash digest and consume hasher let result = hasher.finalize(); - return result; -} - -fn drop_bytes(n: usize, input: &Vec) -> Vec { - 
let mut result = input.clone(); - result.drain(..n); - return result; + return result.to_vec(); } #[cfg(test)] mod tests { use super::*; + use generic_array::{typenum::U32, GenericArray}; + use hex_literal::hex; + use sha3::{Digest as Sha3_Digest, Sha3_256}; + + fn sha3_256(input: &Vec) -> GenericArray { + // create a Sha256 object + let mut hasher = Sha3_256::new(); + + // write input message + hasher.update(input); + + // read hash digest and consume hasher + let result = hasher.finalize(); + return result; + } + + fn drop_bytes(n: usize, input: &Vec) -> Vec { + let mut result = input.clone(); + result.drain(..n); + return result; + } #[test] fn test_sha2() { diff --git a/src/location.rs b/src/location.rs new file mode 100644 index 0000000..e15fe30 --- /dev/null +++ b/src/location.rs @@ -0,0 +1,54 @@ +use serde::{Deserialize, Serialize}; + +/// Line number, 0-indexed +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +pub struct LineNo { + /// Line number + pub line_no: usize, +} + +impl From for LineNo { + fn from(line_no: usize) -> Self { + LineNo { + line_no: line_no, + } + } +} + +/// Index of an argument, e.g. 
Concat has two arguments with indices +/// [0, 1], in that order +pub type ArgumentIndex = usize; + +/// Location of an input or output +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +pub struct Location { + line_no: LineNo, + argument_index: ArgumentIndex, + is_input: bool, +} + +impl LineNo { + /// From::from(2).in_at(3) is the position: + /// - line_no: 2 + /// - argument_index: 3 + /// - is_input: true + pub fn in_at(&self, argument_index: usize) -> Location { + Location { + line_no: *self, + argument_index: argument_index, + is_input: true, + } + } + + /// From::from(2).out_at(3) is the position: + /// - line_no: 2 + /// - argument_index: 3 + /// - is_input: false + pub fn out_at(&self, argument_index: usize) -> Location { + Location { + line_no: *self, + argument_index: argument_index, + is_input: false, + } + } +} diff --git a/src/main.rs b/src/main.rs index eab9c35..ba6cec8 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,17 +1,415 @@ -use cryptoscript::{parse, Executor}; - -fn main() { - let instructions = parse( - r#" - push b"I am the walrus."; - hash_sha256; - push 0x475b03e74f7ee448273dbde5ab892746c7b23a2b4d050ccb7d9270b6fb152b72; - check_equal; - assert_true; - "#, - ) - .expect("failed to parse the input"); - Executor::default() - .consume(instructions) - .expect("error processing instructions"); +use cryptoscript::{parse_json, Elem, ElemSymbol, Instruction, Instructions}; +use cryptoscript::{Restack, Instrs}; +use cryptoscript::{AssertTrue, Push, Lookup, UnpackJson, Index, StringEq}; +use cryptoscript::{Cli}; +use cryptoscript::{TMap, TValue, Template}; +// use cryptoscript::{Query, QueryType}; + +use cryptoscript::{Api}; + +use std::marker::PhantomData; + +// use indexmap::IndexMap; +use clap::{Parser}; +use serde_json::{Map, Number, Value}; + +// TODO: migrate test to current version +// +// #[cfg(test)] +// mod tests { +// use super::*; +// use cryptoscript::{parse}; + +// #[test] +// fn test_parse_exec() { 
+// let instructions = parse( +// r#" +// push b"I am the walrus."; +// hash_sha256; +// push 0x475b03e74f7ee448273dbde5ab892746c7b23a2b4d050ccb7d9270b6fb152b72; +// check_equal; +// assert_true; +// "#, +// ) +// .expect("failed to parse the input"); +// Executor::default() +// .consume(instructions) +// .expect("error processing instructions"); +// } +// } + +#[tokio::main] +async fn main() { + + let _input_json = r#" + { + "queries": [ + { + "uri": "https://api.etherscan.io/api", + "module": "account", + "action": "tokenbalance", + "contractaddress": "0x57d90b64a1a57749b0f932f1a3395792e12e7055", + "address": "0xe04f27eb70e025b78871a2ad7eabe85e61212761", + "tag": "latest", + "blockno": "8000000", + "apikey": "YourApiKeyToken", + "response": + { + "status": "1", + "message": "OK", + "result": "135499" + } + } + ], + "prompts": [ + { + "action": "siwe", + "version": "1.1.0", + "data": { + "message": "service.org wants you to sign in with your Ethereum account:\n0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2\n\nI accept the ServiceOrg Terms of Service: https://service.org/tos\n\nURI: https://service.org/login\nVersion: 1\nChain ID: 1\nNonce: 32891757\nIssued At: 2021-09-30T16:25:24.000Z\nResources:\n- ipfs://Qme7ss3ARVgxv6rXqVPiikMJ8u2NLgmgszg13pYrDKEoiu\n- https://example.com/my-web2-claim.json", + "fields": { + "domain": "service.org", + "address": "0xe04f27eb70e025b78871a2ad7eabe85e61212761", + "statement": "I accept the ServiceOrg Terms of Service: https://service.org/tos", + "uri": "https://service.org/login", + "version": "1", + "chainId": 1, + "nonce": "32891757", + "issuedAt": "2021-09-30T16:25:24.000Z", + "resources": ["ipfs://Qme7ss3ARVgxv6rXqVPiikMJ8u2NLgmgszg13pYrDKEoiu", "https://example.com/my-web2-claim.json"] + } + } + } + ] + } + "#; + + let instructions_vec: Vec = vec![ + // TEST #1 + // Instruction::Push(Elem::Bool(true)), + // Instruction::Restack(Restack::id()), + // Instruction::AssertTrue, + + // FOR DEBUGGING TYPER + // 
Instruction::Push(Elem::Json(Default::default())), + + Instruction::UnpackJson(ElemSymbol::Object), + Instruction::Restack(Restack::dup()), + + // x["queries"] + Instruction::Push(Elem::String("queries".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::Array), + + // x[0] + Instruction::Push(Elem::Number(From::from(0u8))), + Instruction::Index, + Instruction::UnpackJson(ElemSymbol::Object), + + // x["action"] = "tokenbalance" + Instruction::Restack(Restack::dup()), + Instruction::Push(Elem::String("action".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::String), + Instruction::Push(Elem::String("tokenbalance".to_string())), + Instruction::StringEq, + Instruction::AssertTrue, + Instruction::Restack(Restack::drop()), + + // x["contractaddress"] = "0x57d90b64a1a57749b0f932f1a3395792e12e7055" + Instruction::Restack(Restack::dup()), + Instruction::Push(Elem::String("contractaddress".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::String), + Instruction::Push(Elem::String("0x57d90b64a1a57749b0f932f1a3395792e12e7055".to_string())), + Instruction::StringEq, + Instruction::AssertTrue, + Instruction::Restack(Restack::drop()), + + // x["response"]["result"] = "135499" + Instruction::Restack(Restack::dup()), + Instruction::Push(Elem::String("response".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::Object), + Instruction::Push(Elem::String("result".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::String), + Instruction::Push(Elem::String("135499".to_string())), + Instruction::StringEq, + Instruction::AssertTrue, + Instruction::Restack(Restack::drop()), + + // x["prompts"] + Instruction::Restack(Restack::drop()), + Instruction::Push(Elem::String("prompts".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::Array), + + // x[0] + Instruction::Push(Elem::Number(From::from(0u8))), + Instruction::Index, + 
Instruction::UnpackJson(ElemSymbol::Object), + + // x["action"] = "siwe" + Instruction::Restack(Restack::dup()), + Instruction::Push(Elem::String("action".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::String), + Instruction::Push(Elem::String("siwe".to_string())), + Instruction::StringEq, + Instruction::AssertTrue, + Instruction::Restack(Restack::drop()), + + // x["version"] = "1.1.0" + Instruction::Restack(Restack::dup()), + Instruction::Push(Elem::String("version".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::String), + Instruction::Push(Elem::String("1.1.0".to_string())), + Instruction::StringEq, + Instruction::AssertTrue, + Instruction::Restack(Restack::drop()), + + // x["data"]["fields"]["address"] = "0xe04f27eb70e025b78871a2ad7eabe85e61212761" + Instruction::Restack(Restack::dup()), + Instruction::Push(Elem::String("data".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::Object), + Instruction::Push(Elem::String("fields".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::Object), + Instruction::Push(Elem::String("address".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::String), + Instruction::Push(Elem::String("0xe04f27eb70e025b78871a2ad7eabe85e61212761".to_string())), + Instruction::StringEq, + Instruction::AssertTrue, + Instruction::Restack(Restack::drop()), + + // sha256(x["data"]["message"]) + Instruction::Restack(Restack::dup()), + Instruction::Push(Elem::String("data".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::Object), + Instruction::Push(Elem::String("message".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::String), + Instruction::StringToBytes, + Instruction::HashSha256, + + // sha256(x["data"]["fields"]["address"]) + Instruction::Restack(Restack::swap()), + Instruction::Push(Elem::String("data".to_string())), + Instruction::Lookup, + 
Instruction::UnpackJson(ElemSymbol::Object), + Instruction::Push(Elem::String("fields".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::Object), + Instruction::Push(Elem::String("address".to_string())), + Instruction::Lookup, + Instruction::UnpackJson(ElemSymbol::String), + Instruction::StringToBytes, + Instruction::HashSha256, + + // sha256(sha256(x["data"]["message"]) ++ sha256(x["data"]["fields"]["address"])) = + // [53,163,178,139,122,187,171,47,42,135,175,176,240,11,10,152,228,238,106,205,132,68,80,79,188,54,124,242,97,132,31,139] + Instruction::Concat, + Instruction::HashSha256, + Instruction::Push(Elem::Bytes(vec![53,163,178,139,122,187,171,47,42,135,175,176,240,11,10,152,228,238,106,205,132,68,80,79,188,54,124,242,97,132,31,139])), + Instruction::BytesEq, + Instruction::AssertTrue, + Instruction::Restack(Restack::drop()), + ]; + let instructions = Instructions { + instructions: instructions_vec, + }; + + let json_instructions = serde_json::to_string_pretty(&serde_json::to_value(instructions.clone()).unwrap()).unwrap(); + assert_eq!(parse_json(&json_instructions).unwrap(), instructions); + + // match instructions.type_of() { + // Ok(r) => println!("\nfinal type:\n{}", r), + // Err(e) => println!("{}", e), + // } + + let mut instructions_vec_t_1 = Instrs::new(); + instructions_vec_t_1.instr(Push { push: true }); + instructions_vec_t_1.restack(Restack::id()); + instructions_vec_t_1.instr(AssertTrue {}); + + // let mut stack = Stack::new(); + // let input_json_value: serde_json::Value = serde_json::from_str(input_json).unwrap(); + // stack.push_elem(input_json_value); + + // println!("{:?}", instructions_vec_t_1.run(&mut stack)); + // println!("FINAL STACK"); + // println!("{:?}", stack); + + let mut instructions_vec_t_2 = Instrs::new(); + + // x["queries"] + instructions_vec_t_2.instr(UnpackJson { t: PhantomData::> }); + instructions_vec_t_2.instr(Push { push: "queries".to_string() }); + instructions_vec_t_2.instr(Lookup {}); + 
instructions_vec_t_2.instr(UnpackJson { t: PhantomData::> }); + + // x[0] + let zero: Number = From::from(0u8); + instructions_vec_t_2.instr(Push { push: zero }); + instructions_vec_t_2.instr(Index {}); + instructions_vec_t_2.instr(UnpackJson { t: PhantomData::> }); + + // x["action"] = "tokenbalance" + instructions_vec_t_2.restack(Restack::dup()); + instructions_vec_t_2.instr(Push { push: "action".to_string() }); + instructions_vec_t_2.instr(Lookup {}); + instructions_vec_t_2.instr(UnpackJson { t: PhantomData:: }); + instructions_vec_t_2.instr(Push { push: "tokenbalance".to_string() }); + instructions_vec_t_2.instr(StringEq {}); + instructions_vec_t_2.instr(AssertTrue {}); + instructions_vec_t_2.restack(Restack::drop()); + + // x["contractaddress"] = "0x57d90b64a1a57749b0f932f1a3395792e12e7055" + instructions_vec_t_2.restack(Restack::dup()); + instructions_vec_t_2.instr(Push { push: "contractaddress".to_string() }); + instructions_vec_t_2.instr(Lookup {}); + instructions_vec_t_2.instr(UnpackJson { t: PhantomData:: }); + instructions_vec_t_2.instr(Push { push: "0x57d90b64a1a57749b0f932f1a3395792e12e7055".to_string() }); + instructions_vec_t_2.instr(StringEq {}); + instructions_vec_t_2.instr(AssertTrue {}); + instructions_vec_t_2.restack(Restack::drop()); + + // let mut stack = Stack::new(); + // let input_json_value: serde_json::Value = serde_json::from_str(input_json).unwrap(); + // stack.push_elem(input_json_value); + + // println!("instructions:"); + // for instruction in &instructions_vec_t_2.instrs { + // println!("{:?}", instruction); + // } + // println!(""); + + // match instructions_vec_t_2.run(&mut stack) { + // Ok(()) => (), + // Err(e) => println!("failed:\n{}\n", e), + // } + + + + + + + // let mut stack = Stack::new(); + // let input_json_value: serde_json::Value = serde_json::from_str(input_json).unwrap(); + // stack.push_elem(input_json_value); + + // println!("instructions:"); + // for instruction in instructions.clone() { + // println!("{:?}", 
instruction); + // } + // println!(""); + + // let instructions_vec_t_3 = match instructions.to_instrs() { + // Ok(instructions_vec_t) => instructions_vec_t, + // Err(e) => { + // println!("Instructions::to_instrs() failed:\n{}", e); + // panic!("Instructions::to_instrs() failed:\n{}", e) + // }, + // }; + + // match instructions_vec_t_3.run(&mut stack) { + // Ok(()) => (), + // Err(e) => println!("failed:\n{}\n", e), + // } + + + + + + + + println!(""); + println!(""); + // println!("Template test:"); + + // ERC-20 token balance (currently) + // GET + // https://api.etherscan.io/api + // ?module=account + // &action=tokenbalance + // &contractaddress=0x57d90b64a1a57749b0f932f1a3395792e12e7055 + // &address=0xe04f27eb70e025b78871a2ad7eabe85e61212761 + // &tag=latest + // &apikey=YourApiKeyToken + + let erc20_request_json = r#" + { + "module": "account", + "action": "tokenbalance", + "contractaddress": "0x57d90b64a1a57749b0f932f1a3395792e12e7055", + "address": "0xe04f27eb70e025b78871a2ad7eabe85e61212761", + "tag": "latest", + "apikey": "4JGE3TQ3ZAGAM7IK86M24DY2H4EH1AIAZ" + } + "#; + let erc20_response_json = r#" + { + "status":"1", + "message":"OK", + "result":"135499" + } + "#; + let erc20_request = serde_json::from_str(erc20_request_json).unwrap(); + let erc20_response = serde_json::from_str(erc20_response_json).unwrap(); + + let erc20_rate_limit_seconds = 1; + let erc20_api: Api = Api::new(erc20_request, erc20_response, erc20_rate_limit_seconds); + let erc20_api_json: serde_json::Value = serde_json::to_value(erc20_api).unwrap(); + let erc20_api_template = Template::from_json(erc20_api_json); + let _erc20_api_template_json = serde_json::to_string_pretty(&serde_json::to_value(erc20_api_template.clone()).unwrap()).unwrap(); + + // println!("ERC-20:"); + // println!("{}", erc20_api_template_json); + // println!(""); + // println!(""); + + + // let mut variables = Map::new(); + // variables.insert("contractaddress".to_string(), 
Value::String("0x57d90b64a1a57749b0f932f1a3395792e12e7055".to_string())); + // variables.insert("address".to_string(), Value::String("0xe04f27eb70e025b78871a2ad7eabe85e61212761".to_string())); + // variables.insert("apikey".to_string(), Value::String("YourApiKeyToken".to_string())); + + let mut template = TMap::new(); + template.insert("type".to_string(), TValue::String("GET".to_string())); + template.insert("URL".to_string(), TValue::String("https://api.etherscan.io/api".to_string())); + + let mut query_parameters = TMap::new(); + query_parameters.insert("module".to_string(), TValue::String("account".to_string())); + query_parameters.insert("action".to_string(), TValue::String("tokenbalance".to_string())); + query_parameters.insert("contractaddress".to_string(), TValue::Var("contractaddress".to_string())); + query_parameters.insert("address".to_string(), TValue::Var("address".to_string())); + query_parameters.insert("tag".to_string(), TValue::String("latest".to_string())); + query_parameters.insert("apikey".to_string(), TValue::Var("apikey".to_string())); + template.insert("parameters".to_string(), TValue::Object(query_parameters.clone())); + + let mut full_template = Template::new(TValue::Object(template)); + full_template.set("contractaddress".to_string(), Value::String("0x57d90b64a1a57749b0f932f1a3395792e12e7055".to_string())); + full_template.set("address".to_string(), Value::String("0xe04f27eb70e025b78871a2ad7eabe85e61212761".to_string())); + full_template.set("apikey".to_string(), Value::String("YourApiKeyToken".to_string())); + + // let json_template = serde_json::to_string_pretty(&serde_json::to_value(full_template.clone()).unwrap()).unwrap(); + // println!("{}", json_template); + + // let query = Query { + // name: "erc20".to_string(), + // url: "https://api.etherscan.io/api".to_string(), + // template: TValue::Object(query_parameters), + // cached: true, + // query_type: QueryType::Get, + // }; + // let json_query = 
serde_json::to_string_pretty(&serde_json::to_value(query.clone()).unwrap()).unwrap(); + // println!("{}", json_query); + + let cli = Cli::parse(); + cli.run().await; } diff --git a/src/parse.rs b/src/parse.rs index 9307773..c6f490e 100644 --- a/src/parse.rs +++ b/src/parse.rs @@ -8,65 +8,56 @@ /// /// Where CHARS is any number of characters which aren't escaped double-quotes (\") and HEX is a 64 /// digit hexadecimal number. + +use crate::elem::Elem; +use crate::untyped_instruction::Instruction; +use crate::untyped_instructions::Instructions; + use std::str::FromStr; -use generic_array::{typenum::U32, GenericArray}; use thiserror::Error; -pub type Instructions = Vec; - -#[derive(Debug)] -pub enum Instruction { - Push(Elem), - FnHashSha256, - FnCheckEqual, - FnAssertTrue, +/// Parse a list of Instruction's using serde_json::from_str +pub fn parse_json(input: &str) -> Result { + match serde_json::from_str(&input) { + Err(serde_error) => Err(ParseError::SerdeJsonError(serde_error)), + Ok(instructions) => Ok(instructions), + } } +/// Parse a ";"-separated list of instructions, where individual Instruction's +/// are parsed with parse_instruction pub fn parse(input: &str) -> Result { - input - .split(';') - .map(|term| term.trim()) - .filter(|&term| !term.is_empty()) - .map(|term| parse_instruction(term)) - .collect() + Ok(Instructions { + instructions: + input + .split(';') + .map(|term| term.trim()) + .filter(|&term| !term.is_empty()) + .map(|term| parse_instruction(term)) + .collect::, ParseError>>()?, + }) } +/// Parse an individual Instruction fn parse_instruction(term: &str) -> Result { if let Some(rest) = term.strip_prefix("push") { return Ok(Instruction::Push(rest.trim().parse()?)); } match term { - "assert_true" => Ok(Instruction::FnAssertTrue), - "check_equal" => Ok(Instruction::FnCheckEqual), - "hash_sha256" => Ok(Instruction::FnHashSha256), + "assert_true" => Ok(Instruction::AssertTrue), + "check_equal" => Ok(Instruction::CheckEq), + "hash_sha256" => 
Ok(Instruction::HashSha256), _ => Err(ParseError::UnsupportedInstruction(term.to_string())), } } -#[derive(Debug, PartialEq)] -pub enum Elem { - Bool(bool), - Bytes32(GenericArray), - BytesN(Vec), -} - -impl Elem { - pub fn simple_type(&self) -> &'static str { - match self { - Self::Bool(_) => "bool", - Self::Bytes32(_) => "Bytes(32)", - Self::BytesN(_) => "Bytes(N)", - } - } -} - impl FromStr for Elem { type Err = ParseError; fn from_str(s: &str) -> Result { match s.as_bytes() { [b'b', b'"', inner @ .., b'"'] => { - return Ok(Elem::BytesN(inner.to_vec())); + return Ok(Elem::Bytes(inner.to_vec())); } [b'0', b'x', hex_digits @ ..] => { if hex_digits.len() != 64 { @@ -97,15 +88,7 @@ impl FromStr for Elem { }, )?; - if let Some(array) = GenericArray::from_exact_iter(bytes) { - return Ok(Elem::Bytes32(array)); - } else { - use std::hint::unreachable_unchecked; - // if the 'bytes' vec has been constructed without error, then it is 32 bytes - // long, as the hex_digits slice is checked to be 64 digits long, and each pair - // of digits is used to make one byte. - unsafe { unreachable_unchecked() } - } + return Ok(Elem::Bytes(bytes)) } // No need to support booleans, but it is trivial to do so. 
_ => Err(ParseError::UnsupportedElem(s.to_string())), @@ -123,4 +106,7 @@ pub enum ParseError { UnsupportedElem(String), #[error("instruction is malformed or cannot be parsed in this context")] UnsupportedInstruction(String), + #[error("error from serde_json ({0})")] + SerdeJsonError(serde_json::Error), } + diff --git a/src/query.rs b/src/query.rs new file mode 100644 index 0000000..5970f0a --- /dev/null +++ b/src/query.rs @@ -0,0 +1,250 @@ +use crate::json_template::{TValue, TValueRunError}; + +use std::fs; +use std::path::PathBuf; +use std::sync::Arc; + +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use serde_json::{Map, Value}; +use thiserror::Error; +use tokio_stream::{self as stream, StreamExt}; + +/// HTTP request type +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub enum QueryType { + /// GET request + Get, + /// PUT request + Put, +} + +/// A Query template, see Query for additional fields required to run it. +/// This struct is deserialized from an input file by the CLI. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct QueryTemplate { + /// Query name, used for caching, display, and is exposed in the result + pub name: String, + + /// Query URL + pub url: String, + + /// Query JSON template + pub template: TValue, + + /// Whether the result should be cached + pub cached: bool, + + /// HTTP request type + pub query_type: QueryType, +} + +/// Error encountered when running a Query +#[derive(Clone, Debug, Error)] +pub enum QueryError { + /// The value is not cached + #[error("Query::get_cached: value not cached:\n{name:?}\n{url:?}")] + NotCached { + /// Query name + name: String, + /// Request URL + url: String, + }, + + /// Running the reqwest request failed + #[error("Query::run: request failed:\nresponse:\n{response}")] + RequestFailed { + /// Response pretty-printed JSON + response: String, + }, + + /// Error when running query TValue + #[error("TValueRunError:\n{0:?}")] + TValueRunError(TValueRunError), + + /// reqwest::Error + #[error("ReqwestError:\n{0}")] + ReqwestError(Arc), + + /// std::io::Error + #[error("StdIoError:\n{0}")] + StdIoError(Arc), + + /// serde_json::Error + #[error("SerdeJsonError:\n{0}")] + SerdeJsonError(Arc), +} + +impl From for QueryError { + fn from(error: TValueRunError) -> Self { + Self::TValueRunError(error) + } +} + +impl From for QueryError { + fn from(error: reqwest::Error) -> Self { + Self::ReqwestError(Arc::new(error)) + } +} + +impl From for QueryError { + fn from(error: std::io::Error) -> Self { + Self::StdIoError(Arc::new(error)) + } +} + +impl From for QueryError { + fn from(error: serde_json::Error) -> Self { + Self::SerdeJsonError(Arc::new(error)) + } +} + +impl QueryTemplate { + /// Convert to a Value + pub fn to_json(&self) -> Result { + Ok(serde_json::to_value(self)?) + } + + /// Convert to a Query with the given variables, cache_location, resp. 
+ pub fn to_query(self, variables: Arc>, cache_location: Arc) -> Query { + Query { + query_template: self, + variables: variables, + cache_location: cache_location, + } + } +} + +/// QueryTemplate with variables to instantiate it with and a cache location +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Query { + query_template: QueryTemplate, + variables: Arc>, + cache_location: Arc, +} + +impl Query { + /// Index string to cache this Query + pub fn cache_index(&self) -> String { + format!("{:?}:{:?}", self.query_template.name, self.variables) + } + + /// Get the Query::cache_index value at the given cache_location + pub async fn get_cached(&self) -> Result { + if self.query_template.cached { + println!("Checking cache: {:?}", self.cache_location.clone()); + let cache_str = fs::read_to_string((*self.cache_location).clone())?; + let cache: Map = serde_json::from_str(&cache_str)?; + cache.get(&self.cache_index()).ok_or_else(|| { + QueryError::NotCached { + name: self.query_template.name.clone(), + url: self.query_template.url.clone(), + }}).map(|x| x.clone()) + } else { + Err(QueryError::NotCached { + name: self.query_template.name.clone(), + url: self.query_template.url.clone(), + }) + } + } + + /// Put the given result Value in the given cache_location at Query::cache_index, + /// overwriting any existing cached result + pub async fn put_cached(&self, result: Value) -> Result<(), QueryError> { + if self.query_template.cached { + println!("Adding to cache: {:?}", self.cache_location.clone()); + let mut cache: Map = if self.cache_location.as_path().exists() { + let cache_str = fs::read_to_string((*self.cache_location).clone())?; + serde_json::from_str(&cache_str)? 
+ } else { + Map::new() + }; + cache.insert(self.cache_index(), result); + let cache_json = serde_json::to_string_pretty(&serde_json::to_value(cache).unwrap()).unwrap(); + fs::write((*self.cache_location).clone(), cache_json)?; + Ok(()) + } else { + println!("Not cached"); + Ok(()) + } + } + + /// Run queries by: + /// 1. Instantiating the template with the given variables + /// 2. Converting the template to JSON + /// 3. Looking up the query in the cache + /// 4. If not found, dispatch along QueryType, sending using reqwest + /// 5. Cache response if successful + pub async fn run(&self) -> Result { + println!("Running Query \"{}\" at \"{}\"", self.query_template.name, self.query_template.url); + let ran_template = self.clone().query_template.template.run((*self.variables).clone())?; + match serde_json::to_value(ran_template.clone()).and_then(|x| serde_json::to_string_pretty(&x)) { + Ok(json) => println!("{}\n", json), + Err(e) => println!("Printing query template failed: {}", e), + } + match self.clone().get_cached().await { + Ok(result) => { + println!("Got cached result..\n"); + Ok(result) + }, + Err(_e) => { + let client = Client::new(); + let request_builder = match self.query_template.query_type { + QueryType::Get => { + client.get(self.query_template.url.clone()) + }, + QueryType::Put => { + client.put(self.query_template.url.clone()) + }, + }; + let response = request_builder + .json(&ran_template) + .send() + .await + .map_err(|e| QueryError::ReqwestError(Arc::new(e)))?; + if response.status().is_success() { + let result: Value = response.json() + .await + .map_err(|e| QueryError::ReqwestError(Arc::new(e)))?; + self.put_cached(result.clone()).await?; + Ok(result) + } else { + let response_text = response.text() + .await + .unwrap_or_else(|e| format!("error: \n{}", e)); + Err(QueryError::RequestFailed { + response: response_text, + }) + } + }, + } + } +} + +/// An ordered series of QueryTemplates +#[derive(Clone, Debug, PartialEq, Eq, Serialize, 
Deserialize)] +pub struct QueryTemplates { + queries: Vec, +} + +impl QueryTemplates { + /// Number of queries + pub fn len(&self) -> usize { + self.queries.len() + } + + /// Run a list of QueryTemplate's, in series, and collect their results + pub async fn run(self, variables: Arc>, cache_location: Arc) -> Result>, QueryError> { + let mut result = Vec::with_capacity(self.queries.len()); + let mut stream = stream::iter(self.queries); + while let Some(query_template) = stream.next().await { + let query_json = query_template.to_json()?; + let query_result = query_template.to_query(variables.clone(), cache_location.clone()).run().await?; + let mut query_result_json = Map::new(); + query_result_json.insert("query".to_string(), query_json); + query_result_json.insert("result".to_string(), query_result); + result.push(query_result_json) + } + Ok(result) + } +} diff --git a/src/rest_api.rs b/src/rest_api.rs new file mode 100644 index 0000000..8c2d9e2 --- /dev/null +++ b/src/rest_api.rs @@ -0,0 +1,198 @@ +use std::time::SystemTime; +use std::sync::{Arc, Mutex}; + +use actix_web::{get, put, web, App, HttpResponse, HttpServer, Responder}; +use indexmap::IndexMap; +use serde_json::{Map, Value}; +use serde::{Deserialize, Serialize}; + +/// GET REST Api, located at 'apis/{name}' +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct Api { + /// Constant required request JSON + request: Value, + /// Constant response JSON + response: Value, + /// Number of seconds required between queries + rate_limit_seconds: u64, + /// Time of last API call + last_api_call: Option, +} + +impl Api { + /// Create a new Api that's never been called + pub fn new(request: Value, response: Value, rate_limit_seconds: u64) -> Self { + Api { + request: request, + response: response, + rate_limit_seconds: rate_limit_seconds, + last_api_call: None, + } + } + + /// Fail if rate_limit_seconds > elapsed_seconds since last called_now + pub fn check_rate_limit(&self) -> Result<(), String> 
{ + match self.last_api_call { + None => Ok(()), + Some(last_call_time) => { + let elapsed_seconds = last_call_time + .elapsed() + .map_err(|e| format!("internal SystemTime error: {:?}", e))? + .as_secs(); + if self.rate_limit_seconds <= elapsed_seconds { + Ok(()) + } else { + Err(format!("rate limit exceeded:\n{} seconds since last call, but need {} seconds", + elapsed_seconds, + self.rate_limit_seconds)) + } + }, + } + } + + /// Update last_api_call + pub fn called_now(&self) -> Self { + Self { + request: self.request.clone(), + response: self.response.clone(), + rate_limit_seconds: self.rate_limit_seconds, + last_api_call: Some(SystemTime::now()), + } + } +} + +/// All of the supported API's +#[derive(Clone, Debug)] +struct AppState { + apis: Arc>>, +} + +impl AppState { + // NOTE: unclear why this non-dead code is detected as dead, + // perhaps because it's inside #[actix_web::main]? + #[allow(dead_code)] + /// New AppState with empty set of apis + fn new() -> Self { + Self { + apis: Arc::new(Mutex::new(IndexMap::new())), + } + } + + /// Add an API. Its path will be '/apis/name' + fn api(&self, name: String, api: Api) -> Result<(), String> { + println!("Adding API \"{}\":", name); + match serde_json::to_value(api.clone()).and_then(|x| serde_json::to_string_pretty(&x)) { + Ok(json) => println!("{}", json), + Err(e) => println!("Printing API failed: {}", e), + } + self.apis.lock() + .map_err(|e| format!("Acquiring lock failed:\n{}", e))? 
+ .insert(name, api); + Ok(()) + } +} + +#[get("/")] +async fn index() -> impl Responder { + let body_str = r#" + Routes: + - / + - /apis + "#; + HttpResponse::Ok().body(body_str) +} + +#[get("/apis")] +async fn index_apis(data: web::Data) -> impl Responder { + let json_body: Result, String> = data.apis + .lock() + .map_err(|e| format!("{}", e)) + .and_then(|x| { x + .clone() + .into_iter() + .map(|(x, y)| Ok((x, serde_json::to_value(y).map_err(|e| format!("{}", e))?))) + .collect::, String>>() + }); + let pretty_json = serde_json::to_string_pretty(&json_body.unwrap()).unwrap(); + HttpResponse::Ok().body(pretty_json) +} + +#[get("/apis/{api_id}")] +async fn get_api(path: web::Path, data: web::Data, query: web::Json) -> impl Responder { + let path_str: String = path.into_inner(); + match data.apis.lock().map_err(|e| format!("{}", e)) { + Ok(mut apis) => { + println!("DEBUG:\npath:\n{}\napis:\n{:?}\nquery\n{}", path_str, apis, query); + let json_response = apis.clone().get(&path_str) + .ok_or_else(|| format!("API not found: {:?}", path_str)) + .and_then(|api| api.check_rate_limit().map(|_| api)) + .and_then(|api| { + if api.request == query.clone() { + let new_api = api.called_now(); + apis.insert(path_str, new_api); + Ok(api.response.clone()) + } else { + Err(format!("unexpected request JSON, expected:\n \"{}\"", api.request)) + } + }); + match json_response { + Ok(response) => { + println!("response: {}", response); + HttpResponse::Ok().json(response) + }, + Err(ref e) => { + println!("error: {}", e); + HttpResponse::BadRequest().json((e.clone(), json_response, query)) + }, + } + }, + Err(e) => + HttpResponse::NotFound().body(format!("GET /apis/{} failed:\n{}", path_str, e)), + } +} + +#[put("/apis/{api_id}")] +async fn put_api(path: web::Path, data: web::Data, request: web::Json) -> impl Responder { + match data.api(path.clone(), request.into_inner()) { + Ok(()) => HttpResponse::Ok() + .json(format!("API added: /apis/{}", path.clone())), + Err(e) => 
HttpResponse::InternalServerError().json(e), + } +} + +#[actix_web::main] +async fn main() -> std::io::Result<()> { + let server_root = "127.0.0.1"; + let server_port = 8080; + let server_address = format!("http://{}:{}", server_root, server_port); + println!("Starting server.."); + println!("- {} root", server_address); + println!("- {}/apis API's root", server_address); + + let app_state = AppState::new(); + app_state.api("got_null".to_string(), Api { + request: Value::Null, + response: Value::String("Got null!".to_string()), + rate_limit_seconds: 1, + last_api_call: None, + }).map_err(|e| std::io::Error::new(std::io::ErrorKind::WouldBlock, e))?; + + app_state.api("got_number".to_string(), Api { + request: Value::Number(From::from(0u8)), + response: Value::String("Got 0, as expected!".to_string()), + rate_limit_seconds: 1, + last_api_call: None, + }).map_err(|e| std::io::Error::new(std::io::ErrorKind::WouldBlock, e))?; + + HttpServer::new(move || { + App::new() + .app_data(web::Data::new(app_state.clone())) + .service(index) + .service(index_apis) + .service(get_api) + .service(put_api) + }) + .bind((server_root, server_port))? + .run() + .await +} diff --git a/src/restack.rs b/src/restack.rs new file mode 100644 index 0000000..45770b7 --- /dev/null +++ b/src/restack.rs @@ -0,0 +1,242 @@ +use std::cmp; + +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +// TODO: relocate to Stack module? +/// Stack index +pub type StackIx = usize; + +// TODO: pretty-printing? +// + REQUIRED: constant compile-time choice of manipulations +// + local: just print [x_old_stack_index_0, x_old_stack_index_1, ..] +// + global: keep track of stack indices (always possible?) and print where it's from??? +/// Stack manipulation: +/// - All these stack manipulations: +/// + dig +/// + dug +/// + dip +/// + dup +/// + swap +/// + drop +/// - Boil down to: +/// 1. drop inputs +/// 2. replicate inputs +/// 3. 
reorder inputs +/// - Which conveniently boils down to: +/// + xs : [ old_stack_index ] +/// + map (\x -> xs !! x) xs +/// - Which is successful iff all old_stack_index's < stack.len() +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Serialize, Deserialize)] +pub struct Restack { + /// Number of input stack elements to restack + pub restack_depth: StackIx, + + /// Vector of output stack indices + pub restack_vec: Vec, +} + +impl Restack { + /// (consumed_input_stack_size, produced_output_stack_size) + pub fn stack_io_counts(&self) -> (usize, usize) { + (self.restack_depth, self.restack_vec.len()) + } + + /// Identity Restack, i.e. does nothing + pub fn id() -> Self { + Restack { + restack_depth: 0, + restack_vec: vec![], + } + } + + /// swap first two stack elements + pub fn swap() -> Self { + Restack { + restack_depth: 2, + restack_vec: vec![1usize, 0], + } + } + + /// drop the first (n) stack elements + pub fn drop_n(n: usize) -> Self { + Restack { + restack_depth: n, + restack_vec: vec![] + } + } + + /// Drop the first stack element + pub fn drop() -> Self { + Self::drop_n(1) + } + + /// Duplicates the (ix)th value onto the top of the stack (0-indexed) + pub fn dup_n(ix: usize) -> Self { + Restack { + restack_depth: ix+1, + restack_vec: (ix..=ix).chain(0..=ix).collect(), + } + } + + /// Duplicates the 0th value onto the top of the stack (0-indexed) + pub fn dup() -> Self { + Self::dup_n(0) + } + + /// Pull the (ix)th element to the top of the stack + /// + /// dig 4 = { 5, [3, 0, 1, 2] } + pub fn dig(ix: usize) -> Self { + Restack { + restack_depth: ix+1, + restack_vec: (0..=ix).cycle().skip(ix).take(ix+1).collect(), + } + } + + /// Push the top of the stack to the (ix)th position + /// + /// dug 4 = { 5, [1, 2, 3, 0] } + pub fn dug(ix: usize) -> Self { + Restack { + restack_depth: ix+1, + restack_vec: (1..=ix).chain(std::iter::once(0)).collect() + } + } + + /// Restack a Stack. 
See Restack::is_valid_depth for validity checking before running + pub fn run(&self, stack: &mut Vec) -> Result<(), RestackError> { + if self.restack_depth <= stack.len() { + let result = self.restack_vec.iter().map(|&restack_index| + match stack.get(restack_index) { + None => Err(RestackError::StackIndexInvalid{ restack_index: restack_index, restack_depth: self.restack_depth, }), + Some(stack_element) => Ok( stack_element.clone() ), + } + ).collect::, RestackError>>(); + match result { + Ok(mut result_ok) => { + result_ok.extend(stack.drain(self.restack_depth..)); + *stack = result_ok; + Ok(()) + }, + Err(e) => Err(e) + } + + } else { + Err(RestackError::InvalidDepth{ stack_len: stack.len(), restack_depth: self.restack_depth, }) + } + } + + /// If true, Restack::run must succeed on all inputs whose lengths are at + /// least as long as self.restack_depth + /// + /// self.is_valid_depth() -> + /// self.restack_depth <= xs.len() -> + /// self.run(xs).is_ok() == true + pub fn is_valid_depth(&self) -> bool { + !self.restack_vec.iter().any(|&restack_index| self.restack_depth <= restack_index) + } + + /// Append two Restack's, i.e. 
compose them together: + /// + /// x.append(y).run(s) == x.run(y.run(s)) + /// + /// NOTE: inputs and result are unchecked (run is_valid_depth on arguments for safe version) + pub fn append(&self, other: Self) -> Self { + Restack { + restack_depth: cmp::max(self.restack_depth, other.restack_depth), + restack_vec: self.restack_vec.iter().map(|&restack_index| + match other.restack_vec.get(restack_index) { + None => restack_index, + Some(stack_index) => stack_index.clone(), + } + ).collect() + } + } +} + + +#[derive(Clone, Copy, Debug, PartialEq, Error)] +pub enum RestackError { + #[error("invalid Restack: restack_index = {restack_index:?} out of bounds for restack_depth = {restack_depth:?}")] + StackIndexInvalid { + restack_index: usize, + restack_depth: usize, + }, + #[error("attempt to restack {restack_depth:?} elements of a stack with only {stack_len:?} elements")] + InvalidDepth { + stack_len: usize, + restack_depth: usize, + }, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_restack_id() { + let mut example_stack = vec![false, true]; + let restack = Restack::id(); + assert!(restack.is_valid_depth(), "Restack::id() has invalid depth"); + assert_eq!(Ok(example_stack.clone()), restack.run(&mut example_stack).map(|()| example_stack)) + } + + #[test] + fn test_restack_dig() { + assert!(Restack::dig(4).is_valid_depth(), "Restack::dig(4) has invalid depth"); + assert_eq!(Restack { restack_depth: 5, restack_vec: vec![4, 0, 1, 2, 3] }, Restack::dig(4)); + let mut example_stack_in = vec![false, false, false, false, true]; + let example_stack_out = vec![true, false, false, false, false]; + assert_eq!(Ok(example_stack_out.clone()), Restack::dig(4).run(&mut example_stack_in).map(|()| example_stack_in)) + } + + #[test] + fn test_restack_dug() { + assert!(Restack::dug(4).is_valid_depth(), "Restack::dug(4) has invalid depth"); + assert_eq!(Restack { restack_depth: 5, restack_vec: vec![1, 2, 3, 4, 0] }, Restack::dug(4)); + let mut example_stack_in = 
vec![true, false, false, false, false]; + let example_stack_out = vec![false, false, false, false, true]; + assert_eq!(Ok(example_stack_out.clone()), Restack::dug(4).run(&mut example_stack_in).map(|()| example_stack_in)) + } + + #[test] + fn test_restack_drop_n() { + for example_stack_out in + [vec![false, true, false], + vec![true, false], + vec![false], + vec![]] { + let mut example_stack_in = vec![false, true, false]; + let restack = Restack::drop_n(3 - example_stack_out.len()); + assert!(restack.is_valid_depth(), "Restack::drop_n(_) has invalid depth"); + assert_eq!(Ok(example_stack_out), restack.run(&mut example_stack_in).map(|()| example_stack_in)); + } + } + + #[test] + fn test_restack_drop() { + let mut example_stack_in = vec![false, true]; + let example_stack_out = vec![true]; + let restack = Restack::drop(); + assert!(restack.is_valid_depth(), "Restack::drop() has invalid depth"); + assert_eq!(Ok(example_stack_out), restack.run(&mut example_stack_in).map(|()| example_stack_in)) + } + + #[test] + fn test_restack_swap() { + let mut example_stack_in = vec![false, true]; + let example_stack_out = vec![true, false]; + let restack = Restack::swap(); + assert!(restack.is_valid_depth(), "Restack::swap() has invalid depth"); + assert_eq!(Ok(example_stack_out), restack.run(&mut example_stack_in).map(|()| example_stack_in)) + } + + #[test] + fn test_restack_swap_twice_append() { + let mut example_stack = vec![false, true]; + let restack = Restack::swap().append(Restack::swap()); + assert!(restack.is_valid_depth(), "Restack::swap().append(Restack::swap()) has invalid depth"); + assert_eq!(Ok(example_stack.clone()), restack.run(&mut example_stack).map(|()| example_stack)) + } +} diff --git a/src/stack.rs b/src/stack.rs new file mode 100644 index 0000000..7eb15eb --- /dev/null +++ b/src/stack.rs @@ -0,0 +1,156 @@ +use crate::elem::{Elem, ElemSymbol}; +use crate::elem_type::StackType; +use crate::an_elem::{AnElem, AnElemError}; +use crate::location::LineNo; + +use 
std::fmt; +use std::fmt::{Display, Formatter}; +use std::marker::PhantomData; + +use enumset::EnumSet; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use generic_array::{GenericArray, ArrayLength}; +use typenum::marker_traits::Unsigned; + +// TODO: pub field needed? +/// A Stack of untyped Elem's +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Serialize, Deserialize)] +pub struct Stack { + /// Ordered list of untyped Elem's + pub stack: Vec, +} + +impl Display for Stack { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { + f.debug_list() + .entries(self.stack + .iter() + .map(|x| format!("{}", x))) + .finish()?; + Ok(()) + } +} + + +impl Stack { + /// New empty Stack + pub fn new() -> Self { + Stack { + stack: vec![], + } + } + + // TODO: since pop can fail, require passing debug info to it + // (so we know what we were expecting) + /// Pop an Elem from the stack (remove 0th element) + pub fn pop(&mut self) -> Result { + let result = self.stack.get(0).ok_or_else(|| StackError::EmptyStack).map(|x|x.clone())?; + self.stack = self.stack.drain(1..).collect(); + Ok(result.clone()) + } + + /// Pop AnElem from the stack (remove 0th element) + pub fn pop_elem(&mut self, _t: PhantomData) -> Result { + let hd_elem = self.pop()?; + Ok(::from_elem(PhantomData, hd_elem)?) + } + + /// Push an Elem onto the Stack (new 0th element) + pub fn push(&mut self, elem: Elem) { + let mut memo = vec![elem]; + memo.append(&mut self.stack); + self.stack = memo; + } + + /// Push AnElem onto the Stack (new 0th element) + pub fn push_elem(&mut self, elem: impl AnElem) { + self.push(elem.to_elem()) + } + + /// Pop a GenericArray from the Stack + // TODO: reversed? + pub fn pop_generic_array>(&mut self, + _t: PhantomData, + _n: PhantomData) -> Result, StackError> { + let mut xs = vec![]; + for _current_index in 1..::USIZE { + let hd_elem = self.pop()?; + xs.push(AnElem::from_elem(PhantomData::, hd_elem)?) 
+ } + GenericArray::from_exact_iter(xs).ok_or_else(|| StackError::PopGenericArray) + } + + /// Type of the Stack's elements + pub fn type_of(&self) -> StackType { + StackType { + types: self.stack.clone().into_iter().map(|x| x.elem_type(vec![])).collect(), + } + } + + /// Debug a Stack's type + pub fn debug_type(&self) -> () { + println!("stack type:\n{}", self.type_of()) + } + + /// Debug a Stack, including its type + pub fn debug(&self) -> Result<(), serde_json::Error> { + self.debug_type(); + println!("------------------------------------------------------------------------------------------"); + for stack_elem in &self.stack { + println!("------------------------------"); + println!("{}", serde_json::to_string_pretty(stack_elem)?) + } + Ok(()) + } +} + + +/// Stack errors +#[derive(Clone, Debug, Error)] +pub enum StackError { + /// Stack::pop: tried to pop from an empty stack + #[error("Stack::pop: tried to pop from an empty stack")] + EmptyStack, + + /// Stack::pop_elem error, i.e. 
type mismatch + #[error("Stack:pop_elem threw an error from AnElem\n{0}")] + AnElemError(AnElemError), + + /// Elem found does not match expected ElemSymbol's + #[error("pop: element popped from the stack {found:?} wasn't the expected type {expected:?} (remaining stack: {stack})")] + UnexpectedElemTypeIn { + /// Expected ElemSymbol's + expected: EnumSet, + + /// Elem found + found: Elem, + + /// Stack popped from + stack: Stack, + }, + + /// Running instruction resulted in an error (from IsInstructionT) + #[error("Stack::run_instruction: instruction {name:?} produced error: {error:?}\non line number: {line_no:?}")] + RunInstruction { + /// Instruction name + name: String, + + /// Instruction error + error: String, + + /// Instruction line number + line_no: LineNo, + }, + + // TODO: add error detail + /// GenericArray::from_exact_iter failed + #[error("Stack::pop_generic_array: failed during GenericArray::from_exact_iter")] + PopGenericArray, +} + +impl From for StackError { + fn from(x: AnElemError) -> Self { + Self::AnElemError(x) + } +} diff --git a/src/typed_instr.rs b/src/typed_instr.rs new file mode 100644 index 0000000..bbed700 --- /dev/null +++ b/src/typed_instr.rs @@ -0,0 +1,71 @@ +use crate::elem::ElemSymbol; +use crate::restack::Restack; +use crate::untyped_instruction::{Instruction, InstructionError}; +use crate::typed_instruction::{IsStackInstruction, StackInstructionError}; +use crate::typed_instructions::{AssertTrue, Lookup, Concat, Slice, Push, + StringEq, BytesEq, ToJson, Index, CheckLe, CheckLt, CheckEq, HashSha256, + StringToBytes, UnpackJson}; + +use std::marker::PhantomData; +use std::fmt::Debug; +use std::sync::Arc; + +use serde_json::{Map, Number, Value}; + +/// A dynamically-resolved IsStackInstruction or Restack +#[derive(Clone, Debug)] +pub enum Instr { + /// Dynamically-resolved IsStackInstruction + Instr(Arc), + + /// Restack + Restack(Restack), +} + +impl Instr { + /// Convert an Instr (typed) to an Instruction (untyped) + pub fn 
to_instruction(&self) -> Result { + match self { + Self::Instr(instr) => instr.to_instruction(), + Self::Restack(restack) => Ok(Instruction::Restack(restack.clone())), + } + } +} + +impl Instruction { + /// Convert an Instruction to an Instr, only failing when UnpackJson is + /// applied to an ElemSymbol that doesn't represent valid JSON + pub fn to_instr(self) -> Result { + match self { + Self::Push(elem) => Ok(Instr::Instr(Arc::new(Push { push: elem }))), + Self::Restack(restack) => Ok(Instr::Restack(restack.clone())), + Self::HashSha256 => Ok(Instr::Instr(Arc::new(HashSha256 {}))), + Self::CheckLe => Ok(Instr::Instr(Arc::new(CheckLe {}))), + Self::CheckLt => Ok(Instr::Instr(Arc::new(CheckLt {}))), + Self::CheckEq => Ok(Instr::Instr(Arc::new(CheckEq {}))), + Self::StringEq => Ok(Instr::Instr(Arc::new(StringEq {}))), + Self::BytesEq => Ok(Instr::Instr(Arc::new(BytesEq {}))), + Self::Concat => Ok(Instr::Instr(Arc::new(Concat {}))), + Self::Slice => Ok(Instr::Instr(Arc::new(Slice {}))), + Self::Index => Ok(Instr::Instr(Arc::new(Index {}))), + Self::Lookup => Ok(Instr::Instr(Arc::new(Lookup {}))), + Self::AssertTrue => Ok(Instr::Instr(Arc::new(AssertTrue {}))), + Self::ToJson => Ok(Instr::Instr(Arc::new(ToJson {}))), + Self::UnpackJson(elem_symbol) => { + match elem_symbol { + ElemSymbol::Unit => Ok(Instr::Instr(Arc::new(UnpackJson { t: PhantomData::<()> }))), + ElemSymbol::Bool => Ok(Instr::Instr(Arc::new(UnpackJson { t: PhantomData:: }))), + ElemSymbol::Number => Ok(Instr::Instr(Arc::new(UnpackJson { t: PhantomData:: }))), + ElemSymbol::String => Ok(Instr::Instr(Arc::new(UnpackJson { t: PhantomData:: }))), + ElemSymbol::Array => Ok(Instr::Instr(Arc::new(UnpackJson { t: PhantomData::> }))), + ElemSymbol::Object => Ok(Instr::Instr(Arc::new(UnpackJson { t: PhantomData::> }))), + _ => Err(InstructionError::UnpackJson { + elem_symbol: elem_symbol, + }) + } + }, + Self::StringToBytes => Ok(Instr::Instr(Arc::new(StringToBytes {}))), + } + } +} + diff --git 
a/src/typed_instrs.rs b/src/typed_instrs.rs new file mode 100644 index 0000000..5e4bef1 --- /dev/null +++ b/src/typed_instrs.rs @@ -0,0 +1,133 @@ +use crate::elem::ElemSymbol; +use crate::elem_type::{ElemType, StackType}; +use crate::stack::Stack; +use crate::restack::Restack; +use crate::elems::ElemsPopError; +use crate::typed_instruction::{IsStackInstruction, StackInstructionError}; +use crate::typed_instr::Instr; + +use std::fmt::Debug; +use std::sync::Arc; + +use enumset::EnumSet; + +/// A list of Instr's. See Instr for more info +#[derive(Clone, Debug)] +pub struct Instrs { + /// A list of Instr's + pub instrs: Vec, +} + +impl Instrs { + /// A new empty list of Instr's + pub fn new() -> Self { + Instrs { + instrs: vec![], + } + } + + /// Print the list of Instr's for debugging + pub fn debug(&self) -> Result<(), ElemsPopError> { + println!("instructions:"); + for (line_no, instruction) in self.instrs.iter().enumerate() { + println!("#{:?}:", line_no); + match instruction { + Instr::Instr(instr) => { + println!("{:?}", instr); + println!("{}\n", instr.type_of()?); + }, + Instr::Restack(restack) => { + println!("{:?}", restack); + println!("{}\n", + restack + .type_of(From::from(line_no)) + .map_err(|e| ElemsPopError::RestackError(e))?); + }, + } + } + println!("--------------------------------------------------------------------------------"); + println!(""); + Ok(()) + } + + /// Assuming an input stack of [Json, Json, ..] (num_input_json count), + /// what's the monomorphic type of Self? 
+ pub fn type_of_mono(&self, num_input_json: usize) -> Result { + let mut stack_type = (0..num_input_json).map(|_| ElemType::from_locations(EnumSet::only(ElemSymbol::Json), vec![])).collect(); + for (line_no, instr_or_restack) in (&self.instrs).into_iter().enumerate() { + println!("------------------------------------------------------------------------------------------"); + println!("line_no: {}", line_no); + println!("{:?}\n", instr_or_restack); + match instr_or_restack { + Instr::Instr(instr) => { + let mut instr_type = instr.type_of() + .map_err(|e| StackInstructionError::ElemsPopError(e))?; + println!("instr: {}\n", instr_type); + stack_type = instr_type.specialize_to_input_stack(stack_type) + .map_err(|e| StackInstructionError::TypeError(e))?; + }, + Instr::Restack(restack) => { + restack.run(&mut stack_type.types) + .map_err(|e| StackInstructionError::RestackError(e))? + }, + } + } + println!("------------------------------------------------------------------------------------------"); + println!("Finished running successfully.\n"); + println!("Final stack:"); + Ok(stack_type) + } + + /// Run the list of individually-typed instructions. It can fail if adjacent + /// instructions have non-matching types, e.g. if "Push(true)" is + /// immediately followed by "UnpackJson". 
+ pub fn run(&self, stack: &mut Stack) -> Result<(), StackInstructionError> { + for (line_no, instr_or_restack) in (&self.instrs).into_iter().enumerate() { + stack.debug().map_err(|e| StackInstructionError::DebugJsonError(Arc::new(e)))?; + println!("------------------------------------------------------------------------------------------"); + println!("line_no: {}", line_no); + println!("{:?}\n", instr_or_restack); + match instr_or_restack { + Instr::Instr(instr) => { + println!(""); + stack.debug_type(); + match instr.type_of() { + Ok(instr_type) => { + println!("instr: {}\n", instr_type); + let mut mut_instr_type = instr_type.clone(); + match mut_instr_type + .specialize_to_input_stack(stack.type_of()) { + Ok(_) => println!("specialized: {}\n", mut_instr_type), + Err(e) => println!("specialization failed:\n{}\n", e), + } + }, + Err(e) => println!("instr type_of errror: {}\n", e), + } + println!(""); + instr.stack_run(stack)? + }, + Instr::Restack(restack) => { + restack.run(&mut stack.stack) + .map_err(|e| StackInstructionError::RestackError(e))? 
+ }, + } + } + println!("------------------------------------------------------------------------------------------"); + println!("Finished running successfully.\n"); + println!("Final stack:"); + stack.debug().map_err(|e| StackInstructionError::DebugJsonError(Arc::new(e)))?; + Ok(()) + } + + /// Push an instruction that IsStackInstruction onto the list of instructions + pub fn instr(&mut self, instr: impl IsStackInstruction + 'static) -> () { + self.instrs.push(Instr::Instr(Arc::new(instr))) + } + + /// Push a Restack onto the list of instructions + pub fn restack(&mut self, restack: Restack) -> () { + self.instrs.push(Instr::Restack(restack)) + } +} + + diff --git a/src/typed_instruction.rs b/src/typed_instruction.rs new file mode 100644 index 0000000..bbe76b5 --- /dev/null +++ b/src/typed_instruction.rs @@ -0,0 +1,105 @@ +use crate::elem::ElemSymbol; +use crate::stack::Stack; +use crate::restack::RestackError; +use crate::types::{Type, TypeError}; +use crate::elems::ElemsPopError; +use crate::elems_list::IsList; +use crate::elems_list_input_output::IOList; +use crate::untyped_instruction::Instruction; + +use std::marker::PhantomData; +use std::fmt::Debug; +use std::sync::Arc; + +use thiserror::Error; + +/// A typed instruction with explicit input, output, and error types +pub trait IsInstructionT: Debug { + /// The input/output type of the instruction + type IO: IOList; + + /// All possible errors that can result from running this instruction. + /// Empty can be used for none. 
+ type Error: std::error::Error; + + /// Convert to an untyped instruction + fn to_instruction(&self) -> Result; + + /// The String name of the Instruction + fn name(x: PhantomData) -> String; + + /// Run the instruction, returning all results using the IOList interface + fn run(&self, x: &Self::IO) -> Result<(), Self::Error>; +} + +#[derive(Clone, Debug, Error)] +pub enum StackInstructionError { + #[error("StackInstructionError::ElemsPopError:\n{0}")] + ElemsPopError(ElemsPopError), + + #[error("RawStackInstructionError:\n{0}")] + RawStackInstructionError(String), + + #[error("MissingOutput:\n{instruction}\n\n{stack_input}")] + // TODO: more granular error typing + MissingOutput { + instruction: String, + stack_input: String, + }, + + #[error("Instrs::type_of_mono type error:\n{0}")] + TypeError(TypeError), + + #[error("StackInstructionError::RestackError:\n{0}")] + RestackError(RestackError), + + #[error("StackInstructionError::DebugJsonError:\n{0}")] + DebugJsonError(Arc), + + #[error("UnpackJsonNotSingleton:\n{first_value:?}\n{second_value:?}")] + UnpackJsonNotSingleton { + first_value: Option, + second_value: Option, + }, + +} + +pub trait IsStackInstruction: Debug { + fn to_instruction(&self) -> Result; + fn name(&self) -> String; + fn type_of(&self) -> Result; + fn stack_run(&self, stack: &mut Stack) -> Result<(), StackInstructionError>; +} + +impl IsStackInstruction for T +where + T: IsInstructionT, +{ + fn to_instruction(&self) -> Result { + self.to_instruction() + } + + fn name(&self) -> String { + IsInstructionT::name(PhantomData::) + } + + fn type_of(&self) -> Result { + IOList::type_of(PhantomData::<::IO>) + } + + fn stack_run(&self, stack: &mut Stack) -> Result<(), StackInstructionError> { + let stack_input = &IsList::pop(PhantomData::<::IO>, stack) + .map_err(|e| StackInstructionError::ElemsPopError(e))?; + self.run(stack_input) + .map_err(|e| StackInstructionError::RawStackInstructionError(format!("{:?}", e)))?; + let output_value = stack_input + 
.returning() + .ok_or_else(|| StackInstructionError::MissingOutput { + instruction: format!("{:?}", self), + stack_input: format!("{:?}", stack_input), + })?; + stack.push(output_value); + Ok(()) + } +} + diff --git a/src/typed_instructions.rs b/src/typed_instructions.rs new file mode 100644 index 0000000..13e0f8a --- /dev/null +++ b/src/typed_instructions.rs @@ -0,0 +1,792 @@ +use crate::elem::{Elem, ElemSymbol}; +use crate::an_elem::AnElem; +use crate::types::empty::Empty; +use crate::elems_singleton::Singleton; +use crate::elems_or::Or; +use crate::elems_all::AllElems; +use crate::elems_input_output_singleton::ReturnSingleton; +use crate::elems_input_output_or::ReturnOr; +use crate::elems_list::IsList; +use crate::elems_list_nil::Nil; +use crate::elems_list_cons::Cons; +use crate::elems_list_input_output_cons::ConsOut; +use crate::untyped_instruction::Instruction; +use crate::typed_instruction::{IsInstructionT, StackInstructionError}; + +use std::cmp; +use std::convert::TryFrom; +use std::marker::PhantomData; +use std::fmt::Debug; +use std::sync::Arc; +use std::string::FromUtf8Error; + +use enumset::EnumSet; +use generic_array::typenum::{U0, U1, U2}; +use serde_json::{Map, Number, Value}; +use thiserror::Error; + + +/// forall T <- {Vec, Vec, Map} +/// input: [x: T, y: T] +/// output: [x.into_iter().chain(y.into_iter()).collect(): T] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct Concat {} + +// TODO: add String! 
+// (Self::String(x), Self::String(y)) => { +// Ok(Self::String(String::from_utf8(Self::concat_generic(Vec::from(x.clone()), Vec::from(y.clone()))) +// .map_err(|_| ElemError::ConcatInvalidUTF8 { lhs: x, rhs: y })?)) +// }, +// +// bytes, array, object +impl IsInstructionT for Concat { + type IO = ConsOut, U2, + ReturnOr, U2, + ReturnSingleton, U2>>>, Nil>; + type Error = Empty; + + fn to_instruction(&self) -> Result { + Ok(Instruction::Concat) + } + + fn name(_x: PhantomData) -> String { + "concat".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let y = x.clone().hd(); + match y { + ReturnOr::Left { array, returning } => { + let lhs = &array[0]; + let rhs = &array[1]; + returning.returning(lhs.into_iter().chain(rhs.into_iter()).cloned().collect()); + }, + ReturnOr::Right(ReturnOr::Left { array, returning }) => { + let lhs = &array[0]; + let rhs = &array[1]; + returning.returning(lhs.into_iter().chain(rhs.into_iter()).cloned().collect()); + }, + ReturnOr::Right(ReturnOr::Right(ReturnSingleton { singleton, returning })) => { + let lhs = &singleton.array[0]; + let rhs = &singleton.array[1]; + returning.returning(lhs.into_iter().chain(rhs.into_iter()).map(|xy| (xy.0.clone(), xy.1.clone())).collect()); + }, + } + Ok(()) + } +} + + +/// input: [x: Bool] +/// output: [x: Bool] +/// +/// Fails iff x is false +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct AssertTrue {} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Error)] +#[error("AssertTrue: found false")] +pub struct AssertTrueError {} + +impl IsInstructionT for AssertTrue { + type IO = ConsOut, Nil>; + type Error = AssertTrueError; + + fn to_instruction(&self) -> Result { + Ok(Instruction::AssertTrue) + } + + fn name(_x: PhantomData) -> String { + "assert_true".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let array = x.clone().hd().singleton.array; + let returning = x.clone().hd().returning; + if array[0] { + returning.returning(true); + Ok(()) 
+ } else { + Err(AssertTrueError {}) + } + } +} + + + +/// input: [] +/// output: [T] +#[derive(Clone, Copy, Debug, PartialEq)] +pub struct Push { + /// The value to push + pub push: T, +} + +impl IsInstructionT for Push { + type IO = ConsOut, Nil>; + type Error = Empty; + + fn to_instruction(&self) -> Result { + Ok(Instruction::Push(self.push.clone().to_elem())) + } + + fn name(_x: PhantomData) -> String { + format!("push_{:?}", AnElem::elem_symbol(PhantomData::)) + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + x.clone().hd().returning.returning(self.push.clone()); + Ok(()) + } +} + + +/// input: [Bytes] +/// output: [Bytes] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct HashSha256 {} + +impl IsInstructionT for HashSha256 { + type IO = ConsOut, U1>, Nil>; + type Error = Empty; + + fn to_instruction(&self) -> Result { + Ok(Instruction::HashSha256) + } + + fn name(_x: PhantomData) -> String { + "sha256".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let array = x.clone().hd().singleton.array; + let returning = x.clone().hd().returning; + returning.returning(super::sha256(&array[0])); + Ok(()) + } +} + +/// forall T <- {Vec, String, Vec, Map} +/// input: [offset: Number, length: Number, iterable: T] +/// output: [iterable: T] +/// +/// Fails if slice is missing or too big +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct Slice {} + +#[derive(Clone, Debug, PartialEq, Eq, Error)] +pub enum SliceError { + #[error("SliceError::OffsetNotU64: \n{0}")] + OffsetNotU64(Number), + + #[error("SliceError::LengthNotU64: \n{0}")] + LengthNotU64(Number), + + #[error("SliceError::Overflow: \noffset: {offset} \nlength: {length}")] + Overflow { + offset: Number, + length: Number, + }, + + #[error("SliceError::TooShort: \noffset: {offset} \nlength: {length} \n{iterable}")] + TooShort { + offset: usize, + length: usize, + iterable: String, + }, + + #[error("SliceError::FromUtf8Error: \n{0}")] + 
FromUtf8Error(FromUtf8Error), +} + +impl From for SliceError { + fn from(error: FromUtf8Error) -> Self { + Self::FromUtf8Error(error) + } +} + +// bytes, string, array, object +impl IsInstructionT for Slice { + type IO = ConsOut, U1, + ReturnOr, U1, + ReturnSingleton, U1>>>>, + Cons, Nil>>; + type Error = SliceError; + + fn to_instruction(&self) -> Result { + Ok(Instruction::Slice) + } + + fn name(_x: PhantomData) -> String { + "slice".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let y = x.clone().hd(); + let offset_length = x.clone().tl().hd().array; + let offset = &offset_length[0]; + let length = &offset_length[1]; + let u_offset = offset.as_u64() + .ok_or_else(|| SliceError::OffsetNotU64(offset.clone())) + .and_then(|x| usize::try_from(x).map_err(|_| SliceError::Overflow { offset: offset.clone(), length: length.clone() }))?; + let u_length = length.as_u64() + .ok_or_else(|| SliceError::LengthNotU64(length.clone())) + .and_then(|x| usize::try_from(x).map_err(|_| SliceError::Overflow { offset: offset.clone(), length: length.clone() }))?; + let u_offset_plus_length = u_offset.checked_add(u_length) + .ok_or_else(|| SliceError::Overflow { offset: offset.clone(), length: length.clone() })?; + match y.clone() { + ReturnOr::Left { array, returning } => { + let iterable = &array[0]; + if iterable.clone().into_iter().count() < u_offset_plus_length { + Err(()) + } else { + returning.returning(iterable.into_iter().skip(u_offset).take(u_length).copied().collect()); + Ok(()) + } + }, + ReturnOr::Right(ReturnOr::Left { array, returning }) => { + let iterable = &array[0]; + if iterable.len() < u_offset_plus_length { + Err(()) + } else { + returning.returning(String::from_utf8(Vec::from(iterable.clone()).into_iter().skip(u_offset).take(u_length).collect())?); + Ok(()) + } + }, + ReturnOr::Right(ReturnOr::Right(ReturnOr::Left { array, returning })) => { + let iterable = &array[0]; + if iterable.clone().into_iter().count() < u_offset_plus_length 
{ + Err(()) + } else { + returning.returning(iterable.into_iter().skip(u_offset).take(u_length).cloned().collect()); + Ok(()) + } + }, + ReturnOr::Right(ReturnOr::Right(ReturnOr::Right(ReturnSingleton { singleton: Singleton { array }, returning }))) => { + let iterable = &array[0]; + if iterable.clone().into_iter().count() < u_offset_plus_length { + Err(()) + } else { + returning.returning(iterable.into_iter().skip(u_offset).take(u_length).map(|xy| (xy.0.clone(), xy.1.clone())).collect()); + Ok(()) + } + }, + }.map_err(|_e| { + SliceError::TooShort { + offset: u_offset, + length: u_length, + // TODO: better error + iterable: format!("{:?}", y), + } + }) + } +} + + + +/// input: [index: Number, iterable: Iterator] +/// output: [iterable[index]: Value] +/// +/// Fails if index is missing or too big +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct Index {} +#[derive(Clone, Debug, PartialEq, Eq, Error)] +pub enum IndexError { + #[error("Index: index not valid u64: {0:?}")] + IndexNotU64(Number), + + #[error("Index: index not valid usize: {0:?}")] + Overflow(Number), + + #[error("Index: iterable: {iterable:?}\nis too short for index: {index:?}")] + TooShort { + index: usize, + iterable: String, + }, +} + +// bytes, array, object +impl IsInstructionT for Index { + type IO = ConsOut, + Cons, + Cons, U1, + Singleton, U1>>, Nil>>>; + type Error = IndexError; + + fn to_instruction(&self) -> Result { + Ok(Instruction::Index) + } + + fn name(_x: PhantomData) -> String { + "index".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let returning = x.clone().hd().returning; + let index = x.clone().tl().hd().array[0].clone(); + let y = &x.clone().tl().tl().hd(); + let u_index = index.as_u64() + .ok_or_else(|| IndexError::IndexNotU64(index.clone())) + .and_then(|x| usize::try_from(x).map_err(|_| IndexError::Overflow(index.clone())))?; + + let result = match y.clone() { + Or::Left(array) => { + array[0] + .clone() + .into_iter() + .skip(u_index) 
+ .next() + }, + Or::Right(Singleton { array }) => { + array[0] + .clone() + .into_iter() + .skip(u_index) + .next() + .map(|(_x, y)| y) + }, + }.ok_or_else(|| { + IndexError::TooShort { + index: u_index, + // TODO: better error + iterable: format!("{:?}", y), + } + })?; + returning.returning(result); + Ok(()) + } +} + +/// input: [x] +/// output: [serde_json::to_value(x): Value] +/// +/// Fails if serde_json::to_value does +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct ToJson {} +#[derive(Clone, Debug, Error)] +#[error("ToJson failed with a serde_json error: \n{input} \n{error}")] +pub struct ToJsonError { + input: Elem, + error: Arc, +} + +impl IsInstructionT for ToJson { + type IO = ConsOut, Cons, Nil>>; + type Error = ToJsonError; + + fn to_instruction(&self) -> Result { + Ok(Instruction::ToJson) + } + + fn name(_x: PhantomData) -> String { + "to_json".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let returning = x.clone().hd().returning; + let y = &x.clone().tl().hd(); + let array = y.untyped(); + let z = array[0].clone(); + returning.returning(serde_json::to_value(z.clone()) + .map_err(move |e| ToJsonError { + input: z, + error: Arc::new(e), + })?); + Ok(()) + } +} + +/// input: [key: String, map: Map] +/// output: [map.get(key): Value] +/// +/// Fails if key is missing +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct Lookup {} +#[derive(Clone, Debug, PartialEq, Eq, Error)] +#[error("Lookup failed, key not in map: \n{key:?} \n{map:?}")] +pub struct LookupError { + key: String, + map: Map, +} + +impl IsInstructionT for Lookup { + type IO = ConsOut, + Cons, + Cons, U1>, Nil>>>; + type Error = LookupError; + + fn to_instruction(&self) -> Result { + Ok(Instruction::Lookup) + } + + fn name(_x: PhantomData) -> String { + "lookup".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let returning = x.clone().hd().returning; + let key = &x.clone().tl().hd().array[0]; + let map = 
&x.clone().tl().tl().hd().array[0]; + returning.returning(map.get(key) + .ok_or_else(|| LookupError { + key: key.clone(), + map: map.clone(), + })?.clone()); + Ok(()) + } +} + + +/// input: [x: Value] +/// output: [AJsonElem::from_value(PhantomData::, x): T] +#[derive(Debug)] +pub struct UnpackJson { + /// The target type of the Value to unpack + pub t: PhantomData, +} +#[derive(Debug, Error)] +#[error("UnpackJson failed to unpack JSON: \n{elem_symbol:?} \n{input}")] +pub struct UnpackJsonError { + elem_symbol: EnumSet, + input: Value, +} + +pub trait AJsonElem: AnElem { + fn to_value(self) -> Value; + fn from_value(t: PhantomData, x: Value) -> Option where Self: Sized; +} + +impl AJsonElem for () { + fn to_value(self) -> Value { + Value::Null + } + + fn from_value(_t: PhantomData, x: Value) -> Option where Self: Sized { + match x { + Value::Null => Some(()), + _ => None, + } + } +} + +impl AJsonElem for bool { + fn to_value(self) -> Value { + Value::Bool(self) + } + + fn from_value(_t: PhantomData, x: Value) -> Option where Self: Sized { + match x { + Value::Bool(y) => Some(y), + _ => None, + } + } +} + +impl AJsonElem for Number { + fn to_value(self) -> Value { + Value::Number(self) + } + + fn from_value(_t: PhantomData, x: Value) -> Option where Self: Sized { + match x { + Value::Number(y) => Some(y), + _ => None, + } + } +} + +impl AJsonElem for String { + fn to_value(self) -> Value { + Value::String(self) + } + + fn from_value(_t: PhantomData, x: Value) -> Option where Self: Sized { + match x { + Value::String(y) => Some(y), + _ => None, + } + } +} + +impl AJsonElem for Vec { + fn to_value(self) -> Value { + Value::Array(self) + } + + fn from_value(_t: PhantomData, x: Value) -> Option where Self: Sized { + match x { + Value::Array(y) => Some(y), + _ => None, + } + } +} + +impl AJsonElem for Map { + fn to_value(self) -> Value { + Value::Object(self) + } + + fn from_value(_t: PhantomData, x: Value) -> Option where Self: Sized { + match x { + Value::Object(y) => 
Some(y), + _ => None, + } + } +} + +impl IsInstructionT for UnpackJson { + type IO = ConsOut, + Cons, Nil>>; + type Error = UnpackJsonError; + + fn to_instruction(&self) -> Result { + let mut symbol_set = ::elem_symbol(PhantomData).into_iter(); + match (symbol_set.next(), symbol_set.next()) { + (Some(elem_symbol), None) => Ok(Instruction::UnpackJson(elem_symbol)), + (x, y) => Err(StackInstructionError::UnpackJsonNotSingleton { + first_value: x, + second_value: y, + }), + } + } + + fn name(_x: PhantomData) -> String { + "unpack_json".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let returning = x.clone().hd().returning; + let json = &x.clone().tl().hd().array[0]; + let result = + AJsonElem::from_value(PhantomData::, json.clone()) + .ok_or_else(|| UnpackJsonError { + elem_symbol: AnElem::elem_symbol(PhantomData::), + input: json.clone(), + })?; + returning.returning(result); + Ok(()) + } +} + + +/// input: [x: String] +/// output: [x.into_bytes(): Vec] +/// +/// false if incomparable +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct StringToBytes {} + +impl IsInstructionT for StringToBytes { + type IO = ConsOut, U0>, Cons, Nil>>; + type Error = Empty; + + fn to_instruction(&self) -> Result { + Ok(Instruction::StringToBytes) + } + + fn name(_x: PhantomData) -> String { + "string_to_bytes".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let returning = x.clone().hd().returning; + let in_str = &x.clone().tl().hd().array[0]; + returning.returning(in_str.clone().into_bytes()); + Ok(()) + } +} + +/// forall T, +/// input: [x: T, y: T] +/// output: [x <= y : bool] +/// +/// false if incomparable +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct CheckLe {} +#[derive(Clone, Debug, PartialEq, Eq, Error)] +#[error("CheckLe applied to incomparable elements: \n{lhs:?}\n {rhs:?}\n")] +pub struct CheckLeError { + lhs: Elem, + rhs: Elem, +} + +impl IsInstructionT for CheckLe { + type IO = ConsOut, Cons, 
Nil>>; + type Error = CheckLeError; + + fn to_instruction(&self) -> Result { + Ok(Instruction::CheckLe) + } + + fn name(_x: PhantomData) -> String { + "check_le".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let returning = x.clone().hd().returning; + let y = &x.clone().tl().hd(); + let array = y.untyped(); + let lhs = array[0].clone(); + let rhs = array[1].clone(); + let cmp_result = lhs.partial_cmp(&rhs) + .ok_or_else(|| CheckLeError { + lhs: lhs, + rhs: rhs + })?; + let result = match cmp_result { + cmp::Ordering::Less => true, + cmp::Ordering::Equal => true, + cmp::Ordering::Greater => false, + }; + returning.returning(result); + Ok(()) + } +} + +/// forall T, +/// input: [x: T, y: T] +/// output: [x < y : bool] +/// +/// false if incomparable +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct CheckLt {} +#[derive(Clone, Debug, PartialEq, Eq, Error)] +#[error("CheckLt applied to incomparable elements: \n{lhs:?}\n {rhs:?}\n")] +pub struct CheckLtError { + lhs: Elem, + rhs: Elem, +} + +impl IsInstructionT for CheckLt { + type IO = ConsOut, Cons, Nil>>; + type Error = CheckLtError; + + fn to_instruction(&self) -> Result { + Ok(Instruction::CheckLt) + } + + fn name(_x: PhantomData) -> String { + "check_lt".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let returning = x.clone().hd().returning; + let y = &x.clone().tl().hd(); + let array = y.untyped(); + let lhs = array[0].clone(); + let rhs = array[1].clone(); + let cmp_result = lhs.partial_cmp(&rhs) + .ok_or_else(|| CheckLtError { + lhs: lhs, + rhs: rhs + })?; + let result = match cmp_result { + cmp::Ordering::Less => true, + _ => false, + }; + returning.returning(result); + Ok(()) + } +} + + +/// forall T, +/// input: [x: T, y: T] +/// output: [x == y : bool] +/// +/// false if incomparable +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct CheckEq {} +#[derive(Clone, Debug, PartialEq, Eq, Error)] +#[error("CheckEq applied to 
incomparable elements: \n{lhs:?}\n {rhs:?}\n")] +pub struct CheckEqError { + lhs: Elem, + rhs: Elem, +} + +impl IsInstructionT for CheckEq { + type IO = ConsOut, Cons, Nil>>; + type Error = CheckEqError; + + fn to_instruction(&self) -> Result { + Ok(Instruction::CheckEq) + } + + fn name(_x: PhantomData) -> String { + "check_eq".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let returning = x.clone().hd().returning; + let y = &x.clone().tl().hd(); + let array = y.untyped(); + let lhs = array[0].clone(); + let rhs = array[1].clone(); + let cmp_result = lhs.partial_cmp(&rhs) + .ok_or_else(|| CheckEqError { + lhs: lhs, + rhs: rhs + })?; + let result = match cmp_result { + cmp::Ordering::Equal => true, + _ => false, + }; + returning.returning(result); + Ok(()) + } +} + +/// input: [x: String, y: String] +/// output: [x == y: bool] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct StringEq {} + +impl IsInstructionT for StringEq { + type IO = ConsOut, Cons, Nil>>; + type Error = Empty; + + fn to_instruction(&self) -> Result { + Ok(Instruction::StringEq) + } + + fn name(_x: PhantomData) -> String { + "check_eq".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let returning = x.clone().hd().returning; + let array = &x.clone().tl().hd().array; + let lhs = array[0].clone(); + let rhs = array[1].clone(); + returning.returning(lhs == rhs); + Ok(()) + } +} + +/// input: [x: Vec, y: Vec] +/// output: [x == y: bool] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct BytesEq {} + +impl IsInstructionT for BytesEq { + type IO = ConsOut, Cons, U2>, Nil>>; + type Error = Empty; + + fn to_instruction(&self) -> Result { + Ok(Instruction::BytesEq) + } + + fn name(_x: PhantomData) -> String { + "check_eq".to_string() + } + + fn run(&self, x: &Self::IO) -> Result<(), Self::Error> { + let returning = x.clone().hd().returning; + let array = &x.clone().tl().hd().array; + let lhs = array[0].clone(); + let rhs = 
array[1].clone(); + returning.returning(lhs == rhs); + Ok(()) + } +} + diff --git a/src/types.rs b/src/types.rs new file mode 100644 index 0000000..dcea3c7 --- /dev/null +++ b/src/types.rs @@ -0,0 +1,374 @@ +pub(crate) mod empty; + +pub(crate) mod type_id; +use type_id::TypeId; +use type_id::map::TypeIdMapError; + +pub(crate) mod context; +use context::{Context, ContextError}; + +use crate::restack::{Restack, RestackError}; +use crate::location::LineNo; +use crate::elem_type::{ElemType, StackType}; + +use std::fmt::{Display, Formatter}; +use std::fmt; + +use thiserror::Error; + +// typing: +// - unification +// + inference +// + checking against inferred or other type (this + inference = bidirecitonal) +// - two categories of tests: +// + property tests for typing methods themselves +// + test that a function having a particular type -> it runs w/o type errors on such inputs + +// TODO: make fields private +/// Type of a series of instructions +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct Type { + /// TypeId Context, assigning ElemType's to each TypeId + pub context: Context, + + /// Input type stack (all TypeId's must be in the Context) + pub i_type: Vec, + + /// Output type stack (all TypeId's must be in the Context) + pub o_type: Vec, +} + +impl Type { + /// Identity Type + pub fn id() -> Self { + Type { + context: Context::new(), + i_type: vec![], + o_type: vec![], + } + } + + /// The next TypeId, guaranteed to not be present in the Context + pub fn next_type_id(&self) -> TypeId { + self.context.next_type_id + } + + /// check whether all the TypeId's are valid + pub fn is_valid(&self) -> bool { + let next_type_id = self.next_type_id(); + self.context.is_valid() && + !(self.i_type.iter().any(|x| *x >= next_type_id) || + self.o_type.iter().any(|x| *x >= next_type_id)) + } + + /// Equivalent to running update_type_id w/ offset from largest to smallest + /// existing TypeId + pub fn offset(&self, offset: TypeId) -> Self { + Type { + context: 
self.context.offset(offset), + i_type: self.i_type.iter().map(|x| x.offset(offset)).collect(), + o_type: self.o_type.iter().map(|x| x.offset(offset)).collect(), + } + } + + /// Update a TypeId, failing if "from" isn't present or "to" already is + pub fn update_type_id(&mut self, from: TypeId, to: TypeId) -> Result<(), TypeError> { + self.context.update_type_id(from, to).map_err(|e| TypeError::UpdateTypeId(e))?; + self.i_type = self.i_type.iter().map(|x| x.update_type_id(from, to)).collect(); + self.o_type = self.o_type.iter().map(|x| x.update_type_id(from, to)).collect(); + Ok(()) + } + + /// Normalize self.context on self.i_type as a basis + pub fn normalize(&self) -> Result { + let mut basis = self.i_type.clone(); + basis.append(&mut self.o_type.clone()); + basis.dedup(); + let (new_context, type_map) = self.context.normalize_on(basis).map_err(|e| TypeError::NormalizeContextError(e))?; + Ok(Type { + context: new_context, + i_type: type_map.run(self.i_type.clone()).map_err(|e| TypeError::TypeIdMapError(e))?, + o_type: type_map.run(self.o_type.clone()).map_err(|e| TypeError::TypeIdMapError(e))?, + }) + } + + /// Specialize self to the given StackType, or fail if it's not a valid + /// specialization. + /// + /// Returns the output stack + pub fn specialize_to_input_stack(&mut self, stack_type: StackType) -> Result { + if self.i_type.len() <= stack_type.len() { + let mut stack_type_iter = stack_type.clone().into_iter(); + for (type_id, elem_type) in self.i_type.clone().into_iter().zip(&mut stack_type_iter) { + // TODO: elimate copy? + let elem_type_copy = elem_type.clone(); + self.context.unify_elem_type(type_id, elem_type).map_err(|e| TypeError::Specialization { + type_id: type_id, + elem_type: elem_type_copy, + context: self.context.clone(), + error: e, + })? 
+ } + for elem_type in stack_type_iter { + let type_id = self.context.push(elem_type); + self.i_type.push(type_id); + } + // Ok(()) + + Ok(StackType { + types: self.o_type.clone().into_iter().map(|type_id| { + self.context.clone().get(&type_id, &|| ContextError::SpecializeToInputStack { + type_of: self.clone(), + stack_type: stack_type.clone(), + }) + }).collect::, ContextError>>() + .map_err(|e| TypeError::SpecializeToInputStackContextError(e))?, + }) + + } else { + Err(TypeError::SpecializeToInputStack { + type_of: self.clone(), + stack_type: stack_type.clone(), + }) + } + } + + /// Unify two Type's by producing the type of their composition. + /// + /// f : self + /// g : other + /// self.compose(other) : (f ++ g).type_of() + /// + /// input -> + /// other.i_type + /// other.o_type + /// self.i_type + /// self.o_type + /// -> output + /// + /// 1. iterate through (zip(self.o_type, other.i_type)) and unify the pairs into a new context + /// 2. collect the remainder and add them to the context + /// 3. 
add the remainder to (self.i_type, other.o_type), with replaced variables + pub fn compose(&self, other: Self) -> Result { + println!(""); + println!("composing:\n{0}\n\nAND\n{1}\n", self, other); + + let mut context = self.context.clone(); + // println!("context: {}", context); + // println!("context.next_type_id: {:?}", context.next_type_id.type_id); + + let offset_other = other.offset(self.next_type_id()); + // println!("offset_other: {}", offset_other); + + context.disjoint_union(offset_other.context.clone()) + .map_err(|e| TypeError::ComposeContextError(e))?; + // println!("context union: {}", context); + + let mut mut_offset_other = offset_other.clone(); + let mut zip_len = 0; + let other_o_type = offset_other.o_type.iter().clone(); + let self_i_type = self.i_type.iter().clone(); + other_o_type.zip(self_i_type).try_for_each(|(&o_type, &i_type)| { + zip_len += 1; + context + .unify(o_type, i_type) + .map_err(|e| TypeError::ComposeContextError(e))?; + mut_offset_other + .update_type_id(o_type, i_type)?; + Ok(()) + })?; + + Ok(Type { + context: context, + i_type: mut_offset_other.i_type.iter().chain(self.i_type.iter().skip(zip_len)).copied().collect(), + o_type: self.o_type.iter().chain(mut_offset_other.o_type.iter().skip(zip_len)).copied().collect(), + }) + } + + /// Prepend inputs to self.i_type + pub fn prepend_inputs(&mut self, num_copies: usize, elem_type: ElemType) -> () { + if 0 < num_copies { + let type_id = self.context.push(elem_type); + self.i_type = (1..num_copies).into_iter() + .map(|_| type_id) + .chain(self.i_type.clone().into_iter()) + .collect() + } + } + + /// Append inputs to self.i_type + pub fn append_inputs(&mut self, elem_types: T) -> () + where + T: IntoIterator, + { + for elem_type in elem_types { + let type_id = self.context.push(elem_type); + self.i_type.push(type_id) + } + } + +} + +// Formatting: +// ``` +// Type { +// context: Context { +// context: [ +// (t0, {A, B, C}), +// (t1, {B, C}), +// .. 
+// (tN, {D, E, F})], +// next_type_id: N+1, +// }, +// i_type: [0, 1, .., N], +// 0_type: [i, j, .., k], +// } +// ``` +// +// Results in: +// +// ∀ (t0 ∊ {A, B, C}), +// ∀ (t1 ∊ {B, C}), +// .. +// ∀ (tN ∊ {D, E, F}), +// [t0, t1, .., tN] -> +// [ti, tj, .., tk] +// +impl Display for Type { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { + // TODO: fix normalize + // let self_normalized = self.normalize().map_err(|_| fmt::Error)?; + let self_normalized = self; + write!(f, + "{context}\n[{i_type}] ->\n[{o_type}]", + context = self_normalized.context, + i_type = self_normalized.i_type.iter().fold(String::new(), |memo, x| { + let x_str = x.debug(); + if memo == "" { + x_str + } else { + memo + ", " + &x_str + }}), + o_type = self_normalized.o_type.iter().fold(String::new(), |memo, x| { + let x_str = x.debug(); + if memo == "" { + x_str + } else { + memo + ", " + &x_str + }})) + } +} + +#[cfg(test)] +mod type_display_tests { + use super::*; + use enumset::EnumSet; + + #[test] + fn test_empty() { + let big_type_id = TypeId::new(2^32); + let context = Context::new().offset(big_type_id); + let example_type = Type { + context: context, + i_type: vec![], + o_type: vec![], + }; + assert_eq!("\n[] ->\n[]", format!("{}", example_type)); + } + + #[test] + fn test_singleton() { + for elem_symbol in EnumSet::all().iter() { + let elem_type = ElemType { + type_set: EnumSet::only(elem_symbol), + info: vec![], + }; + let mut context = Context::new(); + let type_id = context.push(elem_type.clone()); + let example_type = Type { + context: context, + i_type: vec![type_id, type_id], + o_type: vec![type_id], + }; + assert_eq!(format!("\n∀ (t0 ∊ {}),\n[t0, t0] ->\n[t0]", elem_type), format!("{}", example_type)); + } + } +} + +/// Type trait errors +#[derive(Clone, Debug, PartialEq, Error)] +pub enum TypeError { + /// "Specialization error:\ntype_id:\n{type_id}\n\nelem_type:\n{elem_type}\n\ncontext:\n{context}\n\nerror:\n{error}" + #[error("Specialization 
error:\ntype_id:\n{type_id}\n\nelem_type:\n{elem_type}\n\ncontext:\n{context}\n\nerror:\n{error}")] + Specialization { + /// ElemType with this TypeId is invalid specialization of Context + type_id: TypeId, + + /// ElemType for type_id is invalid specialization of Context + elem_type: ElemType, + + /// Context not compatible with TypeId, ElemType pair + context: Context, + + /// ContextError + error: ContextError, + }, + + /// "NormalizeContextError\n{0}" + #[error("NormalizeContextError\n{0}")] + NormalizeContextError(ContextError), + + /// "ComposeContextError\n{0}" + #[error("ComposeContextError\n{0}")] + ComposeContextError(ContextError), + + /// "TypeError::update_type_id failed when updating the Context:\n{0}" + #[error("TypeError::update_type_id failed when updating the Context:\n{0}")] + UpdateTypeId(ContextError), + + /// "TypeError::compose disjoint_union\n{0}" + #[error("TypeError::compose disjoint_union\n{0}")] + ComposeDisjointUnion(ContextError), + + /// "Type::normalize applying TypeIdMap failed:\n{0}" + #[error("Type::normalize applying TypeIdMap failed:\n{0}")] + TypeIdMapError(TypeIdMapError), + + /// "Type::specialize_to_input_stack ContextError:\n{0}" + #[error("Type::specialize_to_input_stack ContextError:\n{0}")] + SpecializeToInputStackContextError(ContextError), + + // TODO: use StackType and Display instead of Vec + /// "Type::specialize_to_input_stack: stack_type shorter than expected:\n{type_of}\n{stack_type}" + #[error("Type::specialize_to_input_stack: stack_type shorter than expected:\n{type_of}\n{stack_type}")] + SpecializeToInputStack { + /// Type too long for stack_type + type_of: Type, + + /// Shorter than expected StackType + stack_type: StackType, + }, +} + + +impl Restack { + /// Calculate the Type of a Restack instruction + /// + /// In short, the input stack is [x_1, x_2, .. 
x_restack_depth] + /// and the output stack is self.restack(input_stack) + // TODO: fix locations: out locations are mislabeled as in locations + pub fn type_of(&self, line_no: LineNo) -> Result { + let mut context = Context::new(); + let mut restack_type: Vec = (0..self.restack_depth) + .map(|x| context.push(ElemType::any(vec![line_no.in_at(x)]))) + .collect(); + let i_type = restack_type.clone(); + self.run(&mut restack_type)?; + Ok(Type { + context: context, + i_type: i_type, + o_type: restack_type, + }) + } +} + diff --git a/src/types/context.rs b/src/types/context.rs new file mode 100644 index 0000000..e7848fb --- /dev/null +++ b/src/types/context.rs @@ -0,0 +1,385 @@ +use crate::elem_type::{ElemType, ElemTypeError, StackType}; +use crate::types::type_id::TypeId; +use crate::types::type_id::map::{TypeIdMap, TypeIdMapError}; +use crate::types::Type; + +use std::cmp; +use std::collections::BTreeMap; +use std::fmt::{Display, Formatter}; +use std::fmt; +use std::sync::Arc; + +use thiserror::Error; + +/// A context defining associations between TypeId's and ElemType's +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct Context { + context: BTreeMap, + + /// TODO: make read-only + pub next_type_id: TypeId, +} + +// Formatting: +// ``` +// Context { +// context: [ +// (t0, {A, B, C}), +// (t1, {B, C}), +// .. +// (tN, {D, E, F})], +// next_type_id: N+1, +// } +// ``` +// +// Results in: +// ``` +// ∀ (t0 ∊ {A, B, C}), +// ∀ (t1 ∊ {B, C}), +// .. 
+// ∀ (tN ∊ {D, E, F}), +// ``` +impl Display for Context { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { + write!(f, + "{}", + self.context.iter() + .fold(String::new(), |memo, (i, xs)| { + memo + + "\n" + + &format!("∀ ({t_i} ∊ {xs}),", t_i = i.debug(), xs = xs).to_string() + })) + } +} + +#[cfg(test)] +mod context_display_tests { + use super::*; + use enumset::EnumSet; + + #[test] + fn test_empty() { + let big_type_id = TypeId::new(2^32); + let context = Context::new().offset(big_type_id); + assert_eq!("", format!("{}", context)); + } + + #[test] + fn test_singleton() { + for elem_symbol in EnumSet::all().iter() { + let elem_type = ElemType { + type_set: EnumSet::only(elem_symbol), + info: vec![], + }; + let mut context = Context::new(); + context.push(elem_type.clone()); + assert_eq!(format!("\n∀ (t0 ∊ {}),", elem_type), format!("{}", context)); + } + } +} + +impl Context { + /// New empty context with next_type_id = 0. + pub fn new() -> Self { + Context { + context: BTreeMap::new(), + next_type_id: TypeId::new(0), + } + } + + /// Is self.context valid with respect to self.next_type_id? + pub fn is_valid(&self) -> bool { + !self.context.keys().any(|x| *x >= self.next_type_id) + } + + /// The size of self.context + pub fn size(&self) -> usize { + self.context.len() + } + + /// Push a new ElemType onto the Context, returning its TypeId + pub fn push(&mut self, elem_type: ElemType) -> TypeId { + let push_id = self.next_type_id; + self.context.insert(push_id, elem_type); + self.next_type_id = push_id.offset(TypeId::new(1)); + push_id + } + + /// Normalize the naming of TypeId's along the given basis vector, returning a TypeIdMap with + /// the new associations. 
+ /// + /// Note: NormalizeOnInvalidBasis is possible iff a `TypeId` in (basis) is repeated + /// or missing from (self) + pub fn normalize_on(&self, basis: Vec) -> Result<(Self, TypeIdMap), ContextError> { + let mut source = self.clone(); + let mut result = Self::new(); + let mut type_map = TypeIdMap::new(); + for &type_id in &basis { + match source.context.remove(&type_id) { + None => Err(ContextError::NormalizeOnInvalidBasis { + type_id: type_id, + context: self.clone(), + basis: basis.clone().into_iter().collect(), + }), + Some(elem_type) => { + let new_type_id = result.next_type_id; + result.push(elem_type); + type_map.push(type_id, new_type_id)?; + Ok(()) + }, + }? + } + Ok((result, type_map)) + } + + /// Offset all TypeId's + pub fn offset(&self, offset: TypeId) -> Self { + Context { + context: self.context.iter().map(|(k, x)| (k.offset(offset), x.clone())).collect(), + next_type_id: self.next_type_id.offset(offset), + } + } + + /// Update a TypeId, fails if: + /// - The "from" destination TypeId does not exist in self.context + /// - The "to" destination TypeId already exists in self.context + pub fn update_type_id(&mut self, from: TypeId, to: TypeId) -> Result<(), ContextError> { + if self.context.contains_key(&from) { + Ok(()) + } else { + Err(ContextError::UpdateTypeIdFromMissing { + from: from, + to: to, + context: self.clone(), + }) + }?; + if self.context.contains_key(&to) { + Err(ContextError::UpdateTypeIdToPresent { + from: from, + to: to, + context: self.clone(), + }) + } else { + Ok(()) + }?; + self.context = self.context.iter().map(|(k, x)| (k.update_type_id(from, to), x.clone())).collect(); + self.next_type_id = cmp::max(self.next_type_id, to); + Ok(()) + } + + /// Disjoint union of two Context's: fails if not disjoint + pub fn disjoint_union(&mut self, other: Self) -> Result<(), ContextError> { + for (&type_id, elem_type) in other.context.iter() { + match self.context.insert(type_id, elem_type.clone()) { + None => { + Ok(()) + }, + 
Some(conflicting_elem_type) => Err(ContextError::DisjointUnion { + type_id: type_id, + elem_type: elem_type.clone(), + conflicting_elem_type: conflicting_elem_type, + lhs: self.clone(), + rhs: other.clone(), + }), + }? + } + self.next_type_id = cmp::max(self.next_type_id, other.next_type_id); + Ok(()) + } + + /// Get the ElemType associated with the given TypeId + pub fn get(&mut self, index: &TypeId, error: &dyn Fn() -> ContextError) -> Result { + Ok(self.context.get(index).ok_or_else(|| ContextError::GetUnknownTypeId { + context: self.clone(), + index: *index, + error: Arc::new(error()), + })?.clone()) + } + + /// Unify the types of two TypeId's into the RHS, + /// removing the LHS + pub fn unify(&mut self, xi: TypeId, yi: TypeId) -> Result<(), ContextError> { + let x_type = self.context.remove(&xi).ok_or_else(|| ContextError::Unify { + xs: self.clone(), + xi: xi.clone(), + yi: yi.clone(), + is_lhs: true, + })?; + let y_type = self.context.remove(&yi).ok_or_else(|| ContextError::Unify { + xs: self.clone(), + xi: xi.clone(), + yi: yi.clone(), + is_lhs: false, + })?; + let xy_type = x_type.unify(y_type).or_else(|e| Err(ContextError::UnifyElemType { + xs: self.clone(), + xi: xi.clone(), + yi: yi.clone(), + error: e, + }))?; + self.context.insert(yi, xy_type); + Ok(()) + } + + /// Unify the given ElemType into a particular TypeId in self.context + pub fn unify_elem_type(&mut self, xi: TypeId, elem_type: ElemType) -> Result<(), ContextError> { + let yi = self.push(elem_type); + self.unify(xi, yi) + } + + /// Maximum possible TypeId, not maximum present + pub fn max_type_id(&self) -> Result { + self.next_type_id.previous().ok_or_else(|| ContextError::MaxTypeId(self.clone())) + // let type_id = self.next_type_id.type_id; + // if type_id == 0 { + // Err(ContextError::MaxTypeId(self.clone())) + // } else { + // Ok(TypeId { + // type_id: type_id - 1, + // }) + // } + } +} + +/// Context trait errors +#[derive(Clone, Debug, PartialEq, Error)] +pub enum ContextError { + /// 
"Context::get applied to a TypeId: \n{index:?}\n, not in the Context: \n
+    /// {context:?}\n, error: \n{error:?}\n"
+    #[error("Context::get applied to a TypeId: \n{index:?}\n, not in the Context: \n{context:?}\n, error: \n{error:?}\n")]
+    GetUnknownTypeId {
+        /// Given Context
+        context: Context,
+
+        /// TypeId not found
+        index: TypeId,
+
+        /// Associated error
+        error: Arc,
+    },
+
+    /// "Context::disjoint_union applied to lhs: \n{lhs:?}\n, and rhs: \n{rhs:?}\n,
+    /// with type_id: \n{type_id:?}\n, and elem_type: \n{elem_type:?}\n,
+    /// conflicted with lhs entry conflicting_elem_type: {conflicting_elem_type:?\n}\n"
+    #[error("Context::disjoint_union applied to lhs: \n{lhs:?}\n, and rhs: \n{rhs:?}\n, /
+    with type_id: \n{type_id:?}\n, and elem_type: \n{elem_type:?}\n, conflicted /
+    with lhs entry conflicting_elem_type: {conflicting_elem_type:?\n}\n")]
+    DisjointUnion {
+        /// Conflicting TypeId
+        type_id: TypeId,
+
+        /// RHS conflicting ElemType
+        elem_type: ElemType,
+
+        /// LHS conflicting ElemType
+        conflicting_elem_type: ElemType,
+
+        /// LHS Context
+        lhs: Context,
+
+        /// RHS Context
+        rhs: Context,
+    },
+
+    /// "Context::normalize_on applied to invalid basis: type_id: \n
+    /// {type_id:?}\n, context: \n{context:?}\n, basis: \n{basis:?}\n"
+    #[error("Context::normalize_on applied to invalid basis: type_id: \n{type_id:?}\n, context: \n{context:?}\n, basis: \n{basis:?}\n")]
+    NormalizeOnInvalidBasis {
+        /// TypeId invalid for given context, basis
+        type_id: TypeId,
+
+        /// Given Context
+        context: Context,
+
+        /// Basis of TypeId's to normalize onto: attempts to sort by this basis
+        basis: Vec,
+    },
+
+    /// "Context::update_type_id called on missing 'from: TypeId':\n from: \n
+    /// {from:?}\n to: {to:?}\n context: {context:?}"
+    #[error("Context::update_type_id called on missing 'from: TypeId':\n from: \n{from:?}\n to: {to:?}\n context: {context:?}")]
+    UpdateTypeIdFromMissing {
+        /// Updating TypeId from
+        from: TypeId,
+
+        /// Updating TypeId to
+        to: 
TypeId,
+
+        /// Given Context
+        context: Context,
+    },
+
+    /// "Context::update_type_id called on already-present 'to: TypeId':\n from: \n
+    /// {from:?}\n\n to: \n{to:?}\n context: \n{context:?}\n"
+    #[error("Context::update_type_id called on already-present 'to: TypeId':\n from: \n{from:?}\n\n to: \n{to:?}\n context: \n{context:?}\n")]
+    UpdateTypeIdToPresent {
+        /// Updating TypeId from
+        from: TypeId,
+
+        /// Updating TypeId to
+        to: TypeId,
+
+        /// Given Context
+        context: Context,
+    },
+
+    /// "Context::unify failed:\n xs: \n{xs:?}\n xi: \n{xi:?}\n yi: \n{yi:?}\n
+    /// is_lhs: \n{is_lhs:?}\n"
+    #[error("Context::unify failed:\n xs: \n{xs:?}\n xi: \n{xi:?}\n yi: \n{yi:?}\n is_lhs: \n{is_lhs:?}\n")]
+    Unify {
+        /// Given Context
+        xs: Context,
+
+        /// LHS
+        xi: TypeId,
+
+        /// RHS
+        yi: TypeId,
+
+        /// Is it on the LHS?
+        is_lhs: bool,
+    },
+
+    /// "Context::unify failed to unify ElemType's:\n\nxs:\n{xs}\n\nxi:\n{xi}\n\n
+    /// yi:\n{yi}\n\nelem_error:\n{error}\n"
+    #[error("Context::unify failed to unify ElemType's:\n\nxs:\n{xs}\n\nxi:\n{xi}\n\nyi:\n{yi}\n\nelem_error:\n{error}\n")]
+    UnifyElemType {
+        /// Given Context
+        xs: Context,
+
+        /// LHS TypeId
+        xi: TypeId,
+
+        /// RHS TypeId
+        yi: TypeId,
+
+        /// ElemTypeError
+        error: ElemTypeError,
+    },
+
+    /// "Type::specialize_to_input_stack failed to resolve ElemType's:\ntype_of:\n
+    /// {type_of}\n\nstack_type:\n{stack_type}"
+    #[error("Type::specialize_to_input_stack failed to resolve ElemType's:\ntype_of:\n{type_of}\n\nstack_type:\n{stack_type}")]
+    SpecializeToInputStack {
+        /// The type being specialized
+        type_of: Type,
+
+        /// Stack type attempted to specialize to
+        stack_type: StackType,
+    },
+
+    /// "Context::normalize_on building TypeIdMap failed: \n{0:?}\n"
+    #[error("Context::normalize_on building TypeIdMap failed: \n{0:?}\n")]
+    TypeIdMapError(TypeIdMapError),
+
+    /// "Context::max_type_id: next_type_id == 0: \n{0:?}\n"
+    #[error("Context::max_type_id: next_type_id == 0: \n{0:?}\n")]
+    
MaxTypeId(Context), +} + +impl From for ContextError { + fn from(error: TypeIdMapError) -> Self { + Self::TypeIdMapError(error) + } +} + diff --git a/src/types/empty.rs b/src/types/empty.rs new file mode 100644 index 0000000..e289aad --- /dev/null +++ b/src/types/empty.rs @@ -0,0 +1,15 @@ +use std::marker::PhantomData; + +use thiserror::Error; + +/// An empty enum or impossible-to-inhabit type +#[derive(Clone, Copy, Debug, PartialEq, Eq, Error)] +pub enum Empty {} + +impl Empty { + /// Given Empty, produce anything + pub fn absurd(&self, _p: PhantomData) -> T { + match *self {} + } +} + diff --git a/src/types/type_id.rs b/src/types/type_id.rs new file mode 100644 index 0000000..a7a725f --- /dev/null +++ b/src/types/type_id.rs @@ -0,0 +1,59 @@ + +pub(crate) mod map; + +use std::fmt::{Display, Formatter}; +use std::fmt; + +/// A Type ID, represented as a usize +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct TypeId { + type_id: usize, +} + +impl Display for TypeId { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { + write!(f, "type#{}", self.type_id) + } +} + +impl TypeId { + /// New TypeId with the given ID + pub fn new(type_id: usize) -> Self { + Self { + type_id: type_id, + } + } + + /// format!("t{}", self.type_id) + pub fn debug(&self) -> String { + format!("t{}", self.type_id) + } + + /// Subtract one or return None if 0 + pub fn previous(&self) -> Option { + self.type_id.checked_sub(1).map(|type_id| Self { + type_id: type_id, + }) + } + + // TODO: test by checking: + // xs.map(TypeId).fold(x, offset) = TypeId(xs.fold(x, +)) + /// Offset (add) one TypeId to another + pub fn offset(&self, offset: TypeId) -> Self { + TypeId { + type_id: self.type_id + offset.type_id, + } + } + + /// Replaces "from" TypeId with "to" TypeId. + /// + /// For compatibility with update_type_id in Context, etc. 
+ pub fn update_type_id(&self, from: Self, to: Self) -> Self { + if *self == from { + to + } else { + *self + } + } +} + diff --git a/src/types/type_id/map.rs b/src/types/type_id/map.rs new file mode 100644 index 0000000..ad01139 --- /dev/null +++ b/src/types/type_id/map.rs @@ -0,0 +1,84 @@ +use crate::types::type_id::TypeId; + +use std::collections::BTreeMap; + +use thiserror::Error; + +/// A mapping between assignments of TypeId's +/// +/// Used to preserve consistency of associations from TypeId to ElemType when +/// updating multiple TypeId's +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct TypeIdMap { + map: BTreeMap, +} + +impl TypeIdMap { + /// New empty TypeIdMap + pub fn new() -> Self { + TypeIdMap { + map: BTreeMap::new(), + } + } + + /// Add a mapping to the TypeIdMap, failing if the "from" TypeId" already + /// exists in the map + pub fn push(&mut self, from: TypeId, to: TypeId) -> Result<(), TypeIdMapError> { + if self.map.contains_key(&from) { + Err(TypeIdMapError::PushExists { + from: from, + to: to, + map: self.clone(), + }) + } else { + self.map.insert(from, to); + Ok(()) + } + } + + /// Resolve the map on a single TypeId + pub fn get(&self, index: &TypeId, location: usize) -> Result<&TypeId, TypeIdMapError> { + self.map.get(index) + .ok_or_else(|| TypeIdMapError::GetUnknownTypeId { + index: index.clone(), + location: location, + type_map: self.clone(), + }) + } + + /// Resolve the map on a Vec of TypeId's + pub fn run(&self, type_vars: Vec) -> Result, TypeIdMapError> { + type_vars.iter().enumerate().map(|(i, x)| Ok(self.get(x, i)?.clone())).collect() + } +} + +/// TypeIdMap trait errors +#[derive(Clone, Debug, PartialEq, Error)] +pub enum TypeIdMapError { + /// "TypeIdMap::get attempted to get a TypeId: {index:?}, not in the map: {type_map:?}; at location in TypeIdMap::run {location:?}" + #[error("TypeIdMap::get attempted to get a TypeId: {index:?}, not in the map: {type_map:?}; at location in TypeIdMap::run {location:?}")] + 
GetUnknownTypeId { + /// Missing TypeId + index: TypeId, + + /// TypeIdMap::run location + location: usize, + + /// index missing from this TypeIdMap + type_map: TypeIdMap, + }, + + /// "TypeIdMap::push already exists: mapping from: {from:?}, to: {to:?}, in TypeIdMap {map:?}" + #[error("TypeIdMap::push already exists: mapping from: {from:?}, to: {to:?}, in TypeIdMap {map:?}")] + PushExists { + /// _.push(from, _) + from: TypeId, + + /// _.push(_, to) + to: TypeId, + + /// TypeId "from" already present in this TypeIdMap + map: TypeIdMap, + }, +} + diff --git a/src/untyped_instruction.rs b/src/untyped_instruction.rs new file mode 100644 index 0000000..0c37bbd --- /dev/null +++ b/src/untyped_instruction.rs @@ -0,0 +1,38 @@ +#![allow(missing_docs)] + +use crate::elem::{Elem, ElemSymbol}; +use crate::restack::Restack; + +use std::fmt::Debug; + +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Serialize, Deserialize)] +pub enum Instruction { + Push(Elem), + Restack(Restack), + HashSha256, + CheckLe, + CheckLt, + CheckEq, + StringEq, + BytesEq, + Concat, + Slice, + Index, + Lookup, + AssertTrue, + ToJson, + UnpackJson(ElemSymbol), + StringToBytes, +} + +#[derive(Clone, Copy, Debug, Error)] +pub enum InstructionError { + #[error("Instruction::to_instr UnpackJson does not support: {elem_symbol:?}")] + UnpackJson { + elem_symbol: ElemSymbol, + } +} + diff --git a/src/untyped_instructions.rs b/src/untyped_instructions.rs new file mode 100644 index 0000000..dd5ed34 --- /dev/null +++ b/src/untyped_instructions.rs @@ -0,0 +1,58 @@ +use crate::untyped_instruction::{Instruction, InstructionError}; +use crate::typed_instruction::StackInstructionError; +use crate::typed_instr::Instr; +use crate::typed_instrs::Instrs; + +use serde::{Deserialize, Serialize}; + +/// A list of untyped instructions +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Serialize, Deserialize)] +pub struct Instructions { + /// A list of untyped 
instructions + pub instructions: Vec, +} + +impl IntoIterator for Instructions { + type Item = Instruction; + type IntoIter = as std::iter::IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.instructions.into_iter() + } +} + +impl Instructions { + /// Convert to a list of typed instructions + pub fn to_instrs(self) -> Result { + Ok(Instrs { + instrs: self.into_iter().map(|x| x.to_instr()).collect::, InstructionError>>()?, + }) + } +} + +impl Instrs { + /// Convert to a list of untyped instructions + pub fn to_instructions(self) -> Result { + Ok(Instructions { + instructions: self.instrs.into_iter().map(|x| x.to_instruction()).collect::, StackInstructionError>>()?, + }) + } +} + +// Test program #1: [] -> [] +// +// Instruction::Push(Elem::Bool(true)), +// Instruction::Restack(Restack::id()), +// Instruction::AssertTrue, + +// Test program #2 +// +// ∀ (t0 ∊ {JSON}), +// ∀ (t1 ∊ {JSON}), +// ∀ (t2 ∊ {Object}), +// [t1] -> +// [t0, t2, t1] +// +// Instruction::Push(Elem::Json(Default::default())), +// Instruction::UnpackJson(ElemSymbol::Object), +// Instruction::Restack(Restack::dup()),