Updating library
Liby99 committed Apr 11, 2024
1 parent d75151e · commit a9c5fa1
Showing 49 changed files with 828 additions and 43 deletions.
4 changes: 4 additions & 0 deletions core/src/common/input_tag.rs
@@ -10,6 +10,8 @@ pub enum DynamicInputTag {
Natural(usize),
Float(f64),
ExclusiveFloat(f64, usize),
FloatWithID(usize, f64),
ExclusiveFloatWithID(usize, f64, usize),
Tensor(DynamicExternalTensor),
}

@@ -47,6 +49,8 @@ impl std::fmt::Display for DynamicInputTag {
Self::Natural(n) => n.fmt(f),
Self::Float(n) => n.fmt(f),
Self::ExclusiveFloat(n, i) => f.write_str(&format!("{} [ME({})]", n, i)),
Self::FloatWithID(id, n) => f.write_str(&format!("{} [ID({})]", n, id)),
Self::ExclusiveFloatWithID(id, n, i) => f.write_str(&format!("{} [ID({}), ME({})]", n, id, i)),
Self::Tensor(t) => t.fmt(f),
}
}
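For illustration, a minimal self-contained sketch of the new `Display` cases (the enum is reduced to just the two added variants; the format strings are copied from the hunk above):

```rust
use std::fmt;

// Stand-in for DynamicInputTag, reduced to the two new variants.
enum Tag {
  FloatWithID(usize, f64),
  ExclusiveFloatWithID(usize, f64, usize),
}

impl fmt::Display for Tag {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    match self {
      Tag::FloatWithID(id, n) => write!(f, "{} [ID({})]", n, id),
      Tag::ExclusiveFloatWithID(id, n, i) => write!(f, "{} [ID({}), ME({})]", n, id, i),
    }
  }
}

fn main() {
  assert_eq!(Tag::FloatWithID(3, 0.25).to_string(), "0.25 [ID(3)]");
  assert_eq!(Tag::ExclusiveFloatWithID(3, 0.25, 7).to_string(), "0.25 [ID(3), ME(7)]");
}
```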
2 changes: 1 addition & 1 deletion core/src/compiler/back/pretty.rs
@@ -29,7 +29,7 @@ impl Display for Program {
impl Display for Relation {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
self.attributes.fmt(f)?;
f.write_fmt(format_args!("{}(", self.predicate))?;
f.write_fmt(format_args!(" {}(", self.predicate))?;
for (i, arg) in self.arg_types.iter().enumerate() {
arg.fmt(f)?;
if i < self.arg_types.len() - 1 {
18 changes: 12 additions & 6 deletions core/src/compiler/back/query_plan.rs
@@ -614,9 +614,12 @@ impl<'a> QueryPlanContext<'a> {
};

// Note: We always apply assigns first and then constraints
let node = self.try_apply_non_new_assigns(&mut applied_assigns, node);
let node = self.try_apply_constraint(&mut applied_constraints, node);
let node = self.try_apply_foreign_predicate_atom(&mut applied_foreign_predicates, node);
let node = self.try_apply(
node,
&mut applied_assigns,
&mut applied_constraints,
&mut applied_foreign_predicates,
);
(node, 1)
}
} else {
@@ -642,9 +645,12 @@ impl<'a> QueryPlanContext<'a> {
}

// Note: We always apply assigns first and then constraints
let node = self.try_apply_non_new_assigns(&mut applied_assigns, node);
let node = self.try_apply_constraint(&mut applied_constraints, node);
let node = self.try_apply_foreign_predicate_atom(&mut applied_foreign_predicates, node);
let node = self.try_apply(
node,
&mut applied_assigns,
&mut applied_constraints,
&mut applied_foreign_predicates,
);
(node, 0)
};

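Both call sites collapse the repeated three-pass sequence into a single `try_apply` helper. The commit does not show the helper's body; a plausible self-contained sketch, with `Ctx`, `Node`, and the `Vec<usize>` bookkeeping as simplified stand-ins, is:

```rust
// Hypothetical reconstruction: `try_apply` presumably threads the node
// through the same three passes in the original order.
struct Ctx;
struct Node;

impl Ctx {
  fn try_apply_non_new_assigns(&self, _applied: &mut Vec<usize>, node: Node) -> Node { node }
  fn try_apply_constraint(&self, _applied: &mut Vec<usize>, node: Node) -> Node { node }
  fn try_apply_foreign_predicate_atom(&self, _applied: &mut Vec<usize>, node: Node) -> Node { node }

  fn try_apply(
    &self,
    node: Node,
    applied_assigns: &mut Vec<usize>,
    applied_constraints: &mut Vec<usize>,
    applied_foreign_predicates: &mut Vec<usize>,
  ) -> Node {
    let node = self.try_apply_non_new_assigns(applied_assigns, node);
    let node = self.try_apply_constraint(applied_constraints, node);
    self.try_apply_foreign_predicate_atom(applied_foreign_predicates, node)
  }
}
```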
2 changes: 1 addition & 1 deletion core/src/compiler/front/ast/types.rs
@@ -1,4 +1,4 @@
use serde::ser::{Serialize, SerializeStruct, Serializer};
use serde::ser::SerializeStruct;

use crate::common::value_type::*;

6 changes: 5 additions & 1 deletion core/src/compiler/front/mod.rs
@@ -6,14 +6,18 @@ pub mod attribute;
mod compile;
mod error;
mod f2b;
mod grammar;
pub mod parser;
mod pretty;
mod source;
mod transform;
pub mod transformations;
mod utils;

// Include grammar (generated file)
// It is okay to have dead code in generated file
#[allow(dead_code)]
mod grammar;

pub use analysis::*;
pub use annotation::*;
use ast::*;
2 changes: 2 additions & 0 deletions core/src/compiler/ram/ram2rs.rs
@@ -555,6 +555,8 @@ fn input_tag_to_rs_input_tag(tag: &DynamicInputTag) -> TokenStream {
DynamicInputTag::Natural(n) => quote! { DynamicInputTag::Natural(#n) },
DynamicInputTag::Float(f) => quote! { DynamicInputTag::Float(#f) },
DynamicInputTag::ExclusiveFloat(f, u) => quote! { DynamicInputTag::ExclusiveFloat(#f, #u) },
DynamicInputTag::FloatWithID(id, f) => quote! { DynamicInputTag::FloatWithID(#id, #f) },
DynamicInputTag::ExclusiveFloatWithID(id, f, u) => quote! { DynamicInputTag::ExclusiveFloatWithID(#id, #f, #u) },
DynamicInputTag::Tensor(_) => unimplemented!(),
}
}
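For reference, a minimal sketch of the `quote!` interpolation pattern used above (assuming the `quote` and `proc-macro2` crates): `#id` and `#f` splice the matched fields into the generated constructor call.

```rust
use proc_macro2::TokenStream;
use quote::quote;

// Interpolate the captured fields back into generated source tokens.
fn emit_float_with_id(id: usize, f: f64) -> TokenStream {
  quote! { DynamicInputTag::FloatWithID(#id, #f) }
}
```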
1 change: 1 addition & 0 deletions core/src/lib.rs
@@ -1,6 +1,7 @@
#![feature(min_specialization)]
#![feature(extract_if)]
#![feature(hash_extract_if)]
#![feature(iter_repeat_n)]
#![feature(proc_macro_span)]

pub mod common;
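The new `iter_repeat_n` gate enables the `std::iter::repeat_n` padding used in `diff_prob_storage.rs` below; the API was nightly-only at the time of this commit. A tiny sketch of what it does:

```rust
#![feature(iter_repeat_n)]

fn main() {
  // repeat_n(elem, n) yields `n` clones of `elem`.
  let pad: Vec<(f64, Option<u8>)> = std::iter::repeat_n((0.0, None), 3).collect();
  assert_eq!(pad, vec![(0.0, None); 3]);
}
```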
2 changes: 0 additions & 2 deletions core/src/runtime/env/random.rs
@@ -1,8 +1,6 @@
use std::sync::*;

use rand::prelude::*;
use rand::rngs::SmallRng;
use rand::SeedableRng;

#[derive(Clone, Debug)]
pub struct Random {
1 change: 0 additions & 1 deletion core/src/runtime/provenance/common/cnf_dnf_formula.rs
@@ -1,5 +1,4 @@
use super::super::*;
use super::{AsBooleanFormula, Clause, Literal};

#[derive(Clone, Debug, PartialEq, PartialOrd)]
pub enum FormulaKind {
26 changes: 26 additions & 0 deletions core/src/runtime/provenance/common/diff_prob_storage.rs
@@ -14,6 +14,13 @@ impl<T: Clone, P: PointerFamily> DiffProbStorage<T, P> {
}
}

pub fn new_with_placeholder() -> Self {
Self {
storage: P::new_rc_cell(Vec::new()),
num_requires_grad: P::new_cell(1),
}
}

/// Clone the internal storage
pub fn clone_internal(&self) -> Self {
Self {
@@ -46,6 +53,25 @@ impl<T: Clone, P: PointerFamily> DiffProbStorage<T, P> {
fact_id
}

pub fn add_prob_with_id(&self, prob: f64, external_tag: Option<T>, id: usize) -> usize {
// Increment the `num_requires_grad` if the external tag is provided
if external_tag.is_some() {
P::get_cell_mut(&self.num_requires_grad, |n| *n += 1);
}

// Add
P::get_rc_cell_mut(&self.storage, |s| {
if id >= s.len() {
s.extend(std::iter::repeat_n((0.0, None), id - s.len() + 1));
}

s[id] = (prob.clone(), external_tag.clone())
});

// Return the id
id
}

pub fn get_diff_prob(&self, id: &usize) -> (f64, Option<T>) {
P::get_rc_cell(&self.storage, |d| d[id.clone()].clone())
}
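`add_prob_with_id` writes a fact at an externally supplied index, padding the storage with `(0.0, None)` placeholders when the ID is past the end. A self-contained sketch of that pattern, with stable `Vec::resize` substituting for the nightly `repeat_n` extend and the pointer-family indirection omitted:

```rust
// Grow-then-write: ensure index `id` exists, then store the entry there.
fn set_at<T: Clone>(s: &mut Vec<(f64, Option<T>)>, id: usize, prob: f64, tag: Option<T>) {
  if id >= s.len() {
    s.resize(id + 1, (0.0, None));
  }
  s[id] = (prob, tag);
}

fn main() {
  let mut s: Vec<(f64, Option<u8>)> = Vec::new();
  set_at(&mut s, 2, 0.9, Some(1));
  assert_eq!(s, vec![(0.0, None), (0.0, None), (0.9, Some(1))]);
}
```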
1 change: 0 additions & 1 deletion core/src/runtime/provenance/common/dnf_context.rs
@@ -3,7 +3,6 @@ use std::collections::*;
use itertools::Itertools;
use rand::distributions::WeightedIndex;
use rand::prelude::*;
use rand::rngs::StdRng;

use super::{Clause, DNFFormula, Literal};

1 change: 0 additions & 1 deletion core/src/runtime/provenance/common/dnf_formula.rs
@@ -1,7 +1,6 @@
use itertools::iproduct;

use super::super::*;
use super::{AsBooleanFormula, Clause, Literal};

#[derive(Clone, PartialEq, PartialOrd, Eq)]
pub struct DNFFormula {
6 changes: 6 additions & 0 deletions core/src/runtime/provenance/common/input_tags/float.rs
@@ -66,3 +66,9 @@ impl<T: FromTensor> ConvertFromInputTag<InputExclusiveDiffProb<T>> for f64 {
Some(t.prob)
}
}

impl<T: FromTensor> ConvertFromInputTag<InputExclusiveDiffProbWithID<T>> for f64 {
fn from_input_tag(t: InputExclusiveDiffProbWithID<T>) -> Option<Self> {
Some(t.prob)
}
}
@@ -36,6 +36,8 @@ impl<T: FromTensor> StaticInputTag for InputDiffProb<T> {
DynamicInputTag::Exclusive(_) => None,
DynamicInputTag::Float(f) => Some(Self(f.clone(), None)),
DynamicInputTag::ExclusiveFloat(f, _) => Some(Self(f.clone(), None)),
DynamicInputTag::FloatWithID(_, f) => Some(Self(f.clone(), None)),
DynamicInputTag::ExclusiveFloatWithID(_, f, _) => Some(Self(f.clone(), None)),
DynamicInputTag::Tensor(t) => Some(Self(t.get_f64(), T::from_tensor(t.clone()))),
}
}
@@ -96,3 +98,9 @@ impl<T: FromTensor> ConvertFromInputTag<InputExclusiveDiffProb<T>> for InputDiffProb<T> {
Some(Self(t.prob, None))
}
}

impl<T: FromTensor> ConvertFromInputTag<InputExclusiveDiffProbWithID<T>> for InputDiffProb<T> {
fn from_input_tag(t: InputExclusiveDiffProbWithID<T>) -> Option<Self> {
Some(Self(t.prob, t.external_tag))
}
}
@@ -72,3 +72,12 @@ impl<T: FromTensor> ConvertFromInputTag<InputExclusiveDiffProb<T>> for Exclusion {
}
}
}

impl<T: FromTensor> ConvertFromInputTag<InputExclusiveDiffProbWithID<T>> for Exclusion {
fn from_input_tag(t: InputExclusiveDiffProbWithID<T>) -> Option<Self> {
match &t.exclusion {
Some(e) => Some(Self::Exclusive(e.clone())),
None => Some(Self::Independent),
}
}
}
@@ -78,6 +78,16 @@ impl<T: FromTensor> StaticInputTag for InputExclusiveDiffProb<T> {
external_tag: None,
exclusion: Some(i.clone()),
}),
DynamicInputTag::FloatWithID(_, prob) => Some(Self {
prob: prob.clone(),
external_tag: None,
exclusion: None,
}),
DynamicInputTag::ExclusiveFloatWithID(_, prob, i) => Some(Self {
prob: prob.clone(),
external_tag: None,
exclusion: Some(i.clone()),
}),
DynamicInputTag::Tensor(t) => Some(Self {
prob: t.get_f64(),
external_tag: T::from_tensor(t.clone()),
@@ -145,3 +155,13 @@ impl<T: FromTensor> ConvertFromInputTag<InputExclusiveDiffProb<T>> for InputExclusiveDiffProb<T> {
Some(t.clone())
}
}

impl<T: FromTensor> ConvertFromInputTag<InputExclusiveDiffProbWithID<T>> for InputExclusiveDiffProb<T> {
fn from_input_tag(t: InputExclusiveDiffProbWithID<T>) -> Option<Self> {
Some(Self {
prob: t.prob,
external_tag: t.external_tag,
exclusion: t.exclusion,
})
}
}
@@ -0,0 +1,130 @@
use crate::common::foreign_tensor::*;
use crate::common::input_tag::*;

use super::*;

#[derive(Clone)]
pub struct InputExclusiveDiffProbWithID<T: FromTensor> {
/// The probability of the tag
pub prob: f64,

/// The ID of the tag
pub id: usize,

/// The external tag for differentiability
pub external_tag: Option<T>,

/// An optional identifier of the mutual exclusion
pub exclusion: Option<usize>,
}

impl<T: FromTensor> InputExclusiveDiffProbWithID<T> {
pub fn new(id: usize, prob: f64, tag: T, exclusion: Option<usize>) -> Self {
Self {
id,
prob,
external_tag: Some(tag),
exclusion,
}
}

pub fn new_without_gradient(id: usize, prob: f64, exclusion: Option<usize>) -> Self {
Self {
id,
prob,
external_tag: None,
exclusion,
}
}
}

impl<T: FromTensor> std::fmt::Debug for InputExclusiveDiffProbWithID<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.prob.fmt(f)
}
}

impl<T: FromTensor> From<(usize, f64, T, Option<usize>)> for InputExclusiveDiffProbWithID<T> {
fn from((id, prob, tag, exclusion): (usize, f64, T, Option<usize>)) -> Self {
Self {
id,
prob,
external_tag: Some(tag),
exclusion,
}
}
}

impl<T: FromTensor> StaticInputTag for InputExclusiveDiffProbWithID<T> {
fn from_dynamic_input_tag(t: &DynamicInputTag) -> Option<Self> {
match t {
DynamicInputTag::None => None,
DynamicInputTag::Exclusive(_) => None,
DynamicInputTag::Bool(_) => None,
DynamicInputTag::Natural(_) => None,
DynamicInputTag::Float(_) => None,
DynamicInputTag::ExclusiveFloat(_, _) => None,
DynamicInputTag::FloatWithID(id, prob) => Some(Self {
id: id.clone(),
prob: prob.clone(),
external_tag: None,
exclusion: None,
}),
DynamicInputTag::ExclusiveFloatWithID(id, prob, i) => Some(Self {
id: id.clone(),
prob: prob.clone(),
external_tag: None,
exclusion: Some(i.clone()),
}),
DynamicInputTag::Tensor(_) => None,
}
}
}

impl<T: FromTensor> ConvertFromInputTag<()> for InputExclusiveDiffProbWithID<T> {
fn from_input_tag(_: ()) -> Option<Self> {
None
}
}

impl<T: FromTensor> ConvertFromInputTag<bool> for InputExclusiveDiffProbWithID<T> {
fn from_input_tag(_: bool) -> Option<Self> {
None
}
}

impl<T: FromTensor> ConvertFromInputTag<usize> for InputExclusiveDiffProbWithID<T> {
fn from_input_tag(_: usize) -> Option<Self> {
None
}
}

impl<T: FromTensor> ConvertFromInputTag<Exclusion> for InputExclusiveDiffProbWithID<T> {
fn from_input_tag(_: Exclusion) -> Option<Self> {
None
}
}

impl<T: FromTensor> ConvertFromInputTag<f64> for InputExclusiveDiffProbWithID<T> {
fn from_input_tag(_: f64) -> Option<Self> {
None
}
}

impl<T: FromTensor> ConvertFromInputTag<InputExclusiveProb> for InputExclusiveDiffProbWithID<T> {
fn from_input_tag(_: InputExclusiveProb) -> Option<Self> {
None
}
}

impl<T: FromTensor> ConvertFromInputTag<InputDiffProb<T>> for InputExclusiveDiffProbWithID<T> {
fn from_input_tag(_: InputDiffProb<T>) -> Option<Self> {
None
}
}

impl<T: FromTensor> ConvertFromInputTag<InputExclusiveDiffProbWithID<T>> for InputExclusiveDiffProbWithID<T> {
  fn from_input_tag(t: InputExclusiveDiffProbWithID<T>) -> Option<Self> {
    Some(t)
  }
}
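A hypothetical round-trip through the new `StaticInputTag` impl, assuming the crate's `DynamicInputTag`, `FromTensor`, and `InputExclusiveDiffProbWithID` are in scope:

```rust
// Generic over any tensor type T: FromTensor; not tied to a concrete tensor.
fn parse_with_id<T: FromTensor>(id: usize, prob: f64, me: usize) -> Option<InputExclusiveDiffProbWithID<T>> {
  let tag = DynamicInputTag::ExclusiveFloatWithID(id, prob, me);
  // Yields Some(t) with t.id == id, t.prob == prob, t.exclusion == Some(me),
  // and t.external_tag == None (so it does not participate in gradients).
  InputExclusiveDiffProbWithID::<T>::from_dynamic_input_tag(&tag)
}
```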
@@ -118,3 +118,12 @@ impl<T: FromTensor> ConvertFromInputTag<InputExclusiveDiffProb<T>> for InputExclusiveProb {
Some(Self::new(t.prob.clone(), t.exclusion.clone()))
}
}

impl<T: FromTensor> ConvertFromInputTag<InputExclusiveDiffProbWithID<T>> for InputExclusiveProb {
fn from_input_tag(t: InputExclusiveDiffProbWithID<T>) -> Option<Self> {
Some(Self {
prob: t.prob,
exclusion: t.exclusion,
})
}
}