Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions fuzz/fuzz_lib/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ impl<'f> Extractor<'f> {

/// Attempt to yield a type from the fuzzer.
pub fn extract_final_type(&mut self) -> Option<Arc<FinalTy>> {
// We can costruct extremely large types by duplicating Arcs; there
// We can construct extremely large types by duplicating Arcs; there
// is no need to have an exponential blowup in the number of tasks.
const MAX_N_TASKS: usize = 300;

Expand Down Expand Up @@ -206,7 +206,7 @@ impl<'f> Extractor<'f> {

/// Attempt to yield a type from the fuzzer.
pub fn extract_old_final_type(&mut self) -> Option<Arc<OldFinalTy>> {
// We can costruct extremely large types by duplicating Arcs; there
// We can construct extremely large types by duplicating Arcs; there
// is no need to have an exponential blowup in the number of tasks.
const MAX_N_TASKS: usize = 300;

Expand Down
4 changes: 4 additions & 0 deletions src/analysis.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ use std::{cmp, fmt};
use crate::value::Word;
#[cfg(feature = "elements")]
use elements::encode::Encodable;
#[cfg(feature = "serde")]
use serde::Serialize;
#[cfg(feature = "elements")]
use std::{convert::TryFrom, io};

Expand Down Expand Up @@ -65,6 +67,7 @@ impl From<U32Weight> for bitcoin::Weight {
/// Programs that are CPU-heavy need to be padded
/// so that the witness stack provides a large-enough budget.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize))]
pub struct Cost(u32);

impl Cost {
Expand Down Expand Up @@ -215,6 +218,7 @@ impl From<Cost> for bitcoin::Weight {

/// Bounds on the resources required by a node during execution on the Bit Machine
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize))]
pub struct NodeBounds {
/// Upper bound on the required number of cells (bits).
/// The root additionally requires the bit width of its source and target type (input, output)
Expand Down
2 changes: 1 addition & 1 deletion src/dag.rs
Original file line number Diff line number Diff line change
Expand Up @@ -347,7 +347,7 @@ pub trait DagLike: Sized {
/// with the returned nodes. To correct this, you need to call
/// [`PostOrderIterItem::unswap`].
///
/// To avoid confusion, this structure cannot be directly costructed.
/// To avoid confusion, this structure cannot be directly constructed.
/// Instead it is implicit in the [`DagLike::rtl_post_order_iter`]
/// method.
#[derive(Clone, Debug)]
Expand Down
4 changes: 2 additions & 2 deletions src/human_encoding/named_node.rs
Original file line number Diff line number Diff line change
Expand Up @@ -504,7 +504,7 @@ pub struct Namer {
}

impl Namer {
/// Costruct a new `Namer`. Will assign the name `main` to the node with
/// Construct a new `Namer`. Will assign the name `main` to the node with
/// the given CMR.
pub fn new_rooted(root_cmr: Cmr) -> Self {
Namer {
Expand All @@ -515,7 +515,7 @@ impl Namer {
}
}

/// Costruct a new `Namer`.
/// Construct a new `Namer`.
pub fn new() -> Self {
Namer {
const_idx: 0,
Expand Down
50 changes: 43 additions & 7 deletions src/jet/elements/c_env.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
//! High level APIs for creating C FFI compatible environment.
//!

use bitcoin::taproot::TAPROOT_ANNEX_PREFIX;
use hashes::Hash;
use std::os::raw::c_uchar;

Expand Down Expand Up @@ -33,7 +34,6 @@ struct RawOutputData {
/// passed to the C FFI.
#[derive(Debug)]
struct RawInputData {
#[allow(dead_code)] // see FIXME below
pub annex: Option<Vec<c_uchar>>,
// pegin
pub genesis_hash: Option<[c_uchar; 32]>,
Expand Down Expand Up @@ -75,8 +75,12 @@ fn new_raw_input<'raw>(
inp_data: &'raw RawInputData,
) -> c_elements::CRawInput<'raw> {
c_elements::CRawInput {
// FIXME actually pass the annex in; see https://github.com/BlockstreamResearch/simplicity/issues/311 for some difficulty here.
annex: core::ptr::null(),
annex: inp_data
.annex
.as_ref()
.map(|annex| c_elements::CRawBuffer::new(annex))
.as_ref()
.map_or(core::ptr::null(), |ptr| ptr as *const _),
prev_txid: inp.previous_output.txid.as_ref(),
pegin: inp_data.genesis_hash.as_ref(),
issuance: if inp.has_issuance() {
Expand Down Expand Up @@ -114,7 +118,7 @@ fn new_tx_data(tx: &elements::Transaction, in_utxos: &[ElementsUtxo]) -> RawTran
};
for (inp, in_utxo) in tx.input.iter().zip(in_utxos.iter()) {
let inp_data = RawInputData {
annex: None, // Actually store annex
annex: get_annex(&inp.witness).map(|s| s.to_vec()),
genesis_hash: inp
.pegin_data()
.map(|x| x.genesis_hash.to_raw_hash().to_byte_array()),
Expand Down Expand Up @@ -148,14 +152,29 @@ pub(super) fn new_tx(
) -> *mut c_elements::CTransaction {
let mut raw_inputs = Vec::new();
let mut raw_outputs = Vec::new();
// Allocate space for the raw annexes. This dumb `Vec::from_iter` construction is
// equivalent to `vec![None; tx.input.len()]`, but that won't compile because it
// requires Option::<CRawBuffer>::None to be cloneable, which it's not because
// CRawBuffer isn't.

// SAFETY: this allocation *must* live until after the `simplicity_mallocTransaction`
// at the bottom of this function. We convert the vector to a boxed slice to ensure
// it cannot be resized, which would potentially trigger a reallocation.
let mut raw_annexes = Vec::from_iter((0..tx.input.len()).map(|_| None)).into_boxed_slice();

let txid = tx.txid();
let tx_data = new_tx_data(tx, in_utxos);
for ((inp, in_utxo), inp_data) in tx
for (((n, inp), in_utxo), inp_data) in tx
.input
.iter()
.enumerate()
.zip(in_utxos.iter())
.zip(tx_data.inputs.iter())
{
raw_annexes[n] = inp_data
.annex
.as_ref()
.map(|annex| c_elements::CRawBuffer::new(annex));
let res = new_raw_input(inp, in_utxo, inp_data);
raw_inputs.push(res);
}
Expand All @@ -172,10 +191,17 @@ pub(super) fn new_tx(
version: tx.version,
locktime: tx.lock_time.to_consensus_u32(),
};
unsafe {
let ret = unsafe {
// SAFETY: this is a FFI call and we constructed its argument correctly.
c_elements::simplicity_mallocTransaction(&c_raw_tx)
}
};

// Explicitly drop raw_annexes so Rust doesn't try any funny business dropping it early.
// Drop raw_inputs first since it contains pointers into raw_annexes and we don't want
// them to dangle. (It'd be safe since they're raw pointers, but still bad mojo.)
drop(raw_inputs);
drop(raw_annexes);
ret
}

pub(super) fn new_tap_env(
Expand Down Expand Up @@ -256,3 +282,13 @@ fn serialize_surjection_proof(surjection_proof: &Option<Box<SurjectionProof>>) -
.map(|x| x.serialize())
.unwrap_or_default()
}

/// If the last item in the witness stack is an annex, return the data following the 0x50 byte.
fn get_annex(in_witness: &elements::TxInWitness) -> Option<&[u8]> {
let last_item = in_witness.script_witness.last()?;
if *last_item.first()? == TAPROOT_ANNEX_PREFIX {
Some(&last_item[1..])
} else {
None
}
}
2 changes: 1 addition & 1 deletion src/node/convert.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ pub enum Hide {
/// 4. Finally, the node's data is passed to [`Self::convert_data`], whose job
/// it is to compute the cached data for the new node. For `case` combinators
/// where one child was pruned, `convert_data` will receive an `assertl` or
/// `assertl`, as appropriate, rather than a `case`.
/// `assertr`, as appropriate, rather than a `case`.
///
/// If any method returns an error, then iteration is aborted immediately and
/// the error returned to the caller. If the converter would like to recover
Expand Down
2 changes: 1 addition & 1 deletion src/node/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -612,7 +612,7 @@ impl<N: Marker> Node<N> {

/// Construct a node from its constituent parts.
///
/// This method can be used to directly costruct a node. It will compute the CMR
/// This method can be used to directly construct a node. It will compute the CMR
/// automatically based on the value of `inner` but requires that `cached_data`
/// be provided.
///
Expand Down
Loading