Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'

This commit is contained in:
ruv
2026-02-28 14:39:40 -05:00
7854 changed files with 3522914 additions and 0 deletions

View File

@@ -0,0 +1,335 @@
//! Certificates for Polynomial Properties
//!
//! Provable guarantees via SOS/SDP methods.
use super::polynomial::{Monomial, Polynomial, Term};
use super::sos::{SOSChecker, SOSConfig, SOSResult};
/// Certificate that a polynomial is non-negative.
///
/// Produced by the SOS checker: `is_nonnegative == true` comes with an SOS
/// decomposition; `false` comes either with an explicit counterexample
/// point or (conservatively) with neither when the checker was inconclusive.
#[derive(Debug, Clone)]
pub struct NonnegativityCertificate {
    /// The polynomial the certificate refers to
    pub polynomial: Polynomial,
    /// Whether verified non-negative
    pub is_nonnegative: bool,
    /// SOS decomposition if available
    pub sos_decomposition: Option<super::sos::SOSDecomposition>,
    /// Counter-example point where the polynomial is negative, if found
    pub counterexample: Option<Vec<f64>>,
}
impl NonnegativityCertificate {
    /// Attempt to certify p(x) ≥ 0 for all x.
    ///
    /// Delegates to the SOS checker: an SOS decomposition proves
    /// non-negativity; an explicit negative witness disproves it; an
    /// inconclusive result is conservatively reported as not certified,
    /// with no counterexample.
    pub fn certify(p: &Polynomial) -> Self {
        let checker = SOSChecker::default();
        let result = checker.check(p);
        match result {
            SOSResult::IsSOS(decomp) => Self {
                polynomial: p.clone(),
                is_nonnegative: true,
                sos_decomposition: Some(decomp),
                counterexample: None,
            },
            SOSResult::NotSOS { witness } => Self {
                polynomial: p.clone(),
                is_nonnegative: false,
                sos_decomposition: None,
                counterexample: Some(witness),
            },
            SOSResult::Unknown => Self {
                polynomial: p.clone(),
                is_nonnegative: false, // Conservative: unproven ≠ negative
                sos_decomposition: None,
                counterexample: None,
            },
        }
    }
    /// Attempt to certify p(x) ≥ 0 for x in [lb, ub]^n.
    ///
    /// Simplified Putinar-style certificate with constant multipliers:
    /// if q = p − ε·Σ_i g_i is SOS, where g_i = (x_i − lb)(ub − x_i),
    /// then q ≥ 0 everywhere and every g_i ≥ 0 on the box, hence
    /// p = q + ε·Σ_i g_i ≥ 0 on the box.
    ///
    /// Note: the previous implementation ADDED the slack terms, which is
    /// unsound — an SOS certificate for p + ε·g only shows p ≥ −ε·g,
    /// and g is positive on the interior of the box.
    pub fn certify_on_box(p: &Polynomial, lb: f64, ub: f64) -> Self {
        let n = p.num_variables().max(1);
        let mut modified = p.clone();
        for i in 0..n {
            let xi = Polynomial::var(i);
            let xi_minus_lb = xi.sub(&Polynomial::constant(lb));
            let ub_minus_xi = Polynomial::constant(ub).sub(&xi);
            // g_i = (x_i - lb)(ub - x_i) is ≥ 0 exactly when x_i ∈ [lb, ub]
            let slack = xi_minus_lb.mul(&ub_minus_xi);
            // Subtract a small multiple of g_i so that an SOS certificate
            // for `modified` soundly implies p ≥ 0 on the box.
            modified = modified.sub(&slack.scale(0.001));
        }
        Self::certify(&modified)
    }
}
/// Certificate for bounds on a polynomial.
#[derive(Debug, Clone)]
pub struct BoundsCertificate {
    /// Lower bound certificate (p - lower ≥ 0)
    pub lower: Option<NonnegativityCertificate>,
    /// Upper bound certificate (upper - p ≥ 0)
    pub upper: Option<NonnegativityCertificate>,
    /// Certified lower bound (NEG_INFINITY if none found)
    pub lower_bound: f64,
    /// Certified upper bound (INFINITY if none found)
    pub upper_bound: f64,
}
impl BoundsCertificate {
    /// Find certified bounds on the polynomial by bisection over the
    /// shift constant, searching within [-1000, 1000].
    pub fn certify_bounds(p: &Polynomial) -> Self {
        // Lower bound: largest c such that p - c is certified SOS.
        let lower_bound = Self::find_lower_bound(p, -1000.0, 1000.0);
        let lower = if lower_bound > f64::NEG_INFINITY {
            let shifted = p.sub(&Polynomial::constant(lower_bound));
            Some(NonnegativityCertificate::certify(&shifted))
        } else {
            None
        };
        // Upper bound: smallest c such that c - p is certified SOS.
        let upper_bound = Self::find_upper_bound(p, -1000.0, 1000.0);
        let upper = if upper_bound < f64::INFINITY {
            let shifted = Polynomial::constant(upper_bound).sub(p);
            Some(NonnegativityCertificate::certify(&shifted))
        } else {
            None
        };
        Self {
            lower,
            upper,
            lower_bound,
            upper_bound,
        }
    }
    /// Shared bisection kernel for both bound directions (the two previous
    /// copies differed only in the shifted polynomial and which endpoint
    /// moves on success).
    ///
    /// `lower == true`: search for the largest c with p − c SOS (move `lo`
    /// up on success). `lower == false`: search for the smallest c with
    /// c − p SOS (move `hi` down on success). Returns ±INFINITY when no
    /// feasible c was certified.
    fn find_bound(p: &Polynomial, mut lo: f64, mut hi: f64, lower: bool) -> f64 {
        let checker = SOSChecker::new(SOSConfig {
            max_iters: 50,
            ..Default::default()
        });
        let mut best = if lower {
            f64::NEG_INFINITY
        } else {
            f64::INFINITY
        };
        for _ in 0..20 {
            let mid = (lo + hi) / 2.0;
            let shifted = if lower {
                p.sub(&Polynomial::constant(mid))
            } else {
                Polynomial::constant(mid).sub(p)
            };
            if matches!(checker.check(&shifted), SOSResult::IsSOS(_)) {
                best = mid;
                if lower {
                    lo = mid;
                } else {
                    hi = mid;
                }
            } else if lower {
                hi = mid;
            } else {
                lo = mid;
            }
            if hi - lo < 0.01 {
                break;
            }
        }
        best
    }
    /// Largest c in [lo, hi] with p − c certified SOS.
    fn find_lower_bound(p: &Polynomial, lo: f64, hi: f64) -> f64 {
        Self::find_bound(p, lo, hi, true)
    }
    /// Smallest c in [lo, hi] with c − p certified SOS.
    fn find_upper_bound(p: &Polynomial, lo: f64, hi: f64) -> f64 {
        Self::find_bound(p, lo, hi, false)
    }
    /// Check if bounds are valid (lower does not exceed upper).
    pub fn is_valid(&self) -> bool {
        self.lower_bound <= self.upper_bound
    }
    /// Width of the certified interval; INFINITY for invalid bounds.
    pub fn width(&self) -> f64 {
        if self.is_valid() {
            self.upper_bound - self.lower_bound
        } else {
            f64::INFINITY
        }
    }
}
/// Certificate for monotonicity of a polynomial in one variable.
#[derive(Debug, Clone)]
pub struct MonotonicityCertificate {
    /// Variable index
    pub variable: usize,
    /// Is monotonically increasing in variable
    pub is_increasing: bool,
    /// Is monotonically decreasing in variable
    pub is_decreasing: bool,
    /// Derivative certificate
    pub derivative_certificate: Option<NonnegativityCertificate>,
}
impl MonotonicityCertificate {
    /// Check monotonicity of `p` with respect to variable `variable`.
    ///
    /// `p` is non-decreasing in x_i iff ∂p/∂x_i ≥ 0, and non-increasing
    /// iff −∂p/∂x_i ≥ 0; both directions are attempted via SOS
    /// certification of the (negated) partial derivative.
    pub fn certify(p: &Polynomial, variable: usize) -> Self {
        let derivative = Self::partial_derivative(p, variable);
        let increasing_cert = NonnegativityCertificate::certify(&derivative);
        let decreasing_cert = NonnegativityCertificate::certify(&derivative.neg());
        let is_increasing = increasing_cert.is_nonnegative;
        let is_decreasing = decreasing_cert.is_nonnegative;
        // Keep whichever direction was certified (increasing wins ties).
        let derivative_certificate = if is_increasing {
            Some(increasing_cert)
        } else if is_decreasing {
            Some(decreasing_cert)
        } else {
            None
        };
        Self {
            variable,
            is_increasing,
            is_decreasing,
            derivative_certificate,
        }
    }
    /// Compute the partial derivative ∂p/∂x_var term by term:
    /// c·x_var^k·m  ↦  (c·k)·x_var^(k−1)·m, dropping terms without x_var.
    fn partial_derivative(p: &Polynomial, var: usize) -> Polynomial {
        let mut derived = Vec::new();
        for (m, &c) in p.terms() {
            // Power of `var` in this monomial (0 if absent).
            let power = m
                .powers
                .iter()
                .find(|&&(i, _)| i == var)
                .map_or(0, |&(_, p)| p);
            if power == 0 {
                continue;
            }
            // Reduce the power of `var` by one; drop entries that hit zero.
            let new_powers: Vec<(usize, usize)> = m
                .powers
                .iter()
                .filter_map(|&(i, p)| {
                    let q = if i == var { p - 1 } else { p };
                    if q > 0 {
                        Some((i, q))
                    } else {
                        None
                    }
                })
                .collect();
            derived.push(Term::new(c * power as f64, new_powers));
        }
        Polynomial::from_terms(derived)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Assertions are deliberately weak: the underlying SOS checker is a
    // heuristic and may return Unknown, but it must never produce a false
    // counterexample or a wrong monotonicity verdict on these easy cases.
    #[test]
    fn test_nonnegativity_square() {
        // x² ≥ 0
        let x = Polynomial::var(0);
        let p = x.square();
        let cert = NonnegativityCertificate::certify(&p);
        // Simplified SOS checker may not always find decomposition
        // but should not claim it's negative
        assert!(cert.counterexample.is_none() || cert.is_nonnegative);
    }
    #[test]
    fn test_nonnegativity_sum_of_squares() {
        // x² + y² ≥ 0
        let x = Polynomial::var(0);
        let y = Polynomial::var(1);
        let p = x.square().add(&y.square());
        let cert = NonnegativityCertificate::certify(&p);
        // Simplified SOS checker may not always find decomposition
        // but should not claim it's negative
        assert!(cert.counterexample.is_none() || cert.is_nonnegative);
    }
    #[test]
    fn test_monotonicity_linear() {
        // p = 2x + y is increasing in x (∂p/∂x = 2, a positive constant)
        let p = Polynomial::from_terms(vec![
            Term::new(2.0, vec![(0, 1)]), // 2x
            Term::new(1.0, vec![(1, 1)]), // y
        ]);
        let cert = MonotonicityCertificate::certify(&p, 0);
        assert!(cert.is_increasing);
        assert!(!cert.is_decreasing);
    }
    #[test]
    fn test_monotonicity_negative() {
        // p = -x is decreasing in x (∂p/∂x = -1)
        let p = Polynomial::from_terms(vec![Term::new(-1.0, vec![(0, 1)])]);
        let cert = MonotonicityCertificate::certify(&p, 0);
        assert!(!cert.is_increasing);
        assert!(cert.is_decreasing);
    }
    #[test]
    fn test_bounds_constant() {
        // Bisection on a constant should bracket the constant itself;
        // tolerance 1.0 allows for the 0.01 bisection resolution and the
        // heuristic SOS checks.
        let p = Polynomial::constant(5.0);
        let cert = BoundsCertificate::certify_bounds(&p);
        // Should find bounds close to 5
        assert!((cert.lower_bound - 5.0).abs() < 1.0);
        assert!((cert.upper_bound - 5.0).abs() < 1.0);
    }
}

View File

@@ -0,0 +1,57 @@
//! Polynomial Optimization and Sum-of-Squares
//!
//! Certifiable optimization using SOS (Sum-of-Squares) relaxations.
//!
//! ## Key Capabilities
//!
//! - **SOS Certificates**: Prove non-negativity of polynomials
//! - **Moment Relaxations**: Lasserre hierarchy for global optimization
//! - **Positivstellensatz**: Certificates for polynomial constraints
//!
//! ## Integration with Mincut Governance
//!
//! SOS provides provable guardrails:
//! - Certify that permission rules always satisfy bounds
//! - Prove stability of attention policies
//! - Verify monotonicity of routing decisions
//!
//! ## Mathematical Background
//!
//! A polynomial p(x) is SOS if p = Σ q_i² for some polynomials q_i.
//! If p is SOS, then p(x) ≥ 0 for all x.
//!
//! The SOS condition can be written as a semidefinite program (SDP).
mod certificates;
mod polynomial;
mod sdp;
mod sos;
pub use certificates::{BoundsCertificate, NonnegativityCertificate};
pub use polynomial::{Monomial, Polynomial, Term};
pub use sdp::{SDPProblem, SDPSolution, SDPSolver};
pub use sos::{SOSConfig, SOSDecomposition, SOSResult};
/// Degree of a multivariate monomial
pub type Degree = usize;
/// Variable index
pub type VarIndex = usize;
#[cfg(test)]
mod tests {
    use super::*;
    // Smoke test for the re-exported polynomial API.
    #[test]
    fn test_polynomial_creation() {
        // x² + 2xy + y² = (x + y)²
        let p = Polynomial::from_terms(vec![
            Term::new(1.0, vec![(0, 2)]),         // x²
            Term::new(2.0, vec![(0, 1), (1, 1)]), // 2xy
            Term::new(1.0, vec![(1, 2)]),         // y²
        ]);
        assert_eq!(p.degree(), 2);
        assert_eq!(p.num_variables(), 2);
    }
}

View File

@@ -0,0 +1,512 @@
//! Multivariate Polynomials
//!
//! Representation and operations for multivariate polynomials.
use std::collections::HashMap;
/// A monomial: a product of variables raised to positive powers.
///
/// Stored as `(variable_index, power)` pairs sorted by variable index,
/// with no duplicate variables and no zero powers; the empty list is the
/// constant monomial 1.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Monomial {
    /// (variable_index, power) pairs, sorted by variable index
    pub powers: Vec<(usize, usize)>,
}
impl Monomial {
    /// The constant monomial 1 (empty product).
    pub fn one() -> Self {
        Self { powers: Vec::new() }
    }
    /// The single-variable monomial x_i.
    pub fn var(i: usize) -> Self {
        Self {
            powers: vec![(i, 1)],
        }
    }
    /// Build a monomial from raw `(variable, power)` pairs.
    ///
    /// Pairs are sorted by variable index, duplicate variables have their
    /// powers summed, and zero powers are dropped — establishing the
    /// invariant documented on the struct.
    pub fn new(powers: Vec<(usize, usize)>) -> Self {
        // A BTreeMap both merges duplicates and yields keys in sorted order.
        let mut merged: std::collections::BTreeMap<usize, usize> =
            std::collections::BTreeMap::new();
        for (var, pow) in powers {
            if pow > 0 {
                *merged.entry(var).or_insert(0) += pow;
            }
        }
        Self {
            powers: merged.into_iter().collect(),
        }
    }
    /// Total degree: the sum of all powers.
    pub fn degree(&self) -> usize {
        self.powers.iter().map(|&(_, p)| p).sum()
    }
    /// True for the constant monomial 1.
    pub fn is_constant(&self) -> bool {
        self.powers.is_empty()
    }
    /// Largest variable index appearing, or `None` for the constant.
    pub fn max_var(&self) -> Option<usize> {
        self.powers.last().map(|&(i, _)| i)
    }
    /// Product of two monomials (powers of shared variables add).
    pub fn mul(&self, other: &Monomial) -> Monomial {
        let mut combined = Vec::with_capacity(self.powers.len() + other.powers.len());
        combined.extend_from_slice(&self.powers);
        combined.extend_from_slice(&other.powers);
        Monomial::new(combined)
    }
    /// Evaluate at the point `x`.
    ///
    /// NOTE(review): variables whose index is beyond `x.len()` are skipped
    /// (treated as 1); callers must supply all variables for a meaningful
    /// value — confirm this is the intended out-of-range behavior.
    pub fn eval(&self, x: &[f64]) -> f64 {
        self.powers
            .iter()
            .filter(|&&(i, _)| i < x.len())
            .fold(1.0, |acc, &(i, p)| acc * x[i].powi(p as i32))
    }
    /// Does `self` divide `other`? True iff every variable power of `self`
    /// is matched by an equal-or-larger power in `other`. (Variables are
    /// unique within a monomial, so a simple per-variable lookup suffices.)
    pub fn divides(&self, other: &Monomial) -> bool {
        self.powers
            .iter()
            .all(|&(i, p)| other.powers.iter().any(|&(j, q)| j == i && q >= p))
    }
}
impl std::fmt::Display for Monomial {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if self.is_constant() {
            return write!(f, "1");
        }
        // Render each factor as x<i> or x<i>^<p>, joined with '*'.
        let rendered: Vec<String> = self
            .powers
            .iter()
            .map(|&(i, p)| match p {
                1 => format!("x{}", i),
                _ => format!("x{}^{}", i, p),
            })
            .collect();
        write!(f, "{}", rendered.join("*"))
    }
}
/// A term: a coefficient multiplied by a monomial.
#[derive(Debug, Clone)]
pub struct Term {
    /// Coefficient
    pub coeff: f64,
    /// Monomial
    pub monomial: Monomial,
}
impl Term {
    /// Build a term from a coefficient and raw `(variable, power)` pairs
    /// (normalized through `Monomial::new`).
    pub fn new(coeff: f64, powers: Vec<(usize, usize)>) -> Self {
        let monomial = Monomial::new(powers);
        Self { coeff, monomial }
    }
    /// Constant term `c` (monomial 1).
    pub fn constant(c: f64) -> Self {
        Self {
            monomial: Monomial::one(),
            coeff: c,
        }
    }
    /// Total degree of the underlying monomial.
    pub fn degree(&self) -> usize {
        self.monomial.degree()
    }
}
/// Multivariate polynomial.
///
/// Sparse representation: a map from monomial to coefficient, plus cached
/// total degree and variable count that are kept consistent with the
/// stored terms.
#[derive(Debug, Clone)]
pub struct Polynomial {
    /// Terms indexed by monomial
    terms: HashMap<Monomial, f64>,
    /// Cached degree
    degree: usize,
    /// Number of variables
    num_vars: usize,
}
impl Polynomial {
    /// Coefficients below this magnitude are treated as exactly zero.
    const EPS: f64 = 1e-15;
    /// Normalize a raw monomial→coefficient map into a polynomial: drop
    /// near-zero coefficients, then recompute the cached degree and
    /// variable count from the surviving terms.
    ///
    /// Centralizing this (previously duplicated in `from_terms`, `add`
    /// and `mul`) also fixes a cache bug: `from_terms` used to compute
    /// the degree BEFORE cancellation, so fully-cancelling inputs like
    /// [x², -x²] produced a zero polynomial with cached degree 2.
    fn from_map(mut terms: HashMap<Monomial, f64>) -> Self {
        terms.retain(|_, &mut c| c.abs() >= Self::EPS);
        let degree = terms.keys().map(|m| m.degree()).max().unwrap_or(0);
        let num_vars = terms
            .keys()
            .filter_map(|m| m.max_var())
            .max()
            .map(|v| v + 1)
            .unwrap_or(0);
        Self {
            terms,
            degree,
            num_vars,
        }
    }
    /// Create zero polynomial
    pub fn zero() -> Self {
        Self {
            terms: HashMap::new(),
            degree: 0,
            num_vars: 0,
        }
    }
    /// Create constant polynomial
    pub fn constant(c: f64) -> Self {
        if c == 0.0 {
            return Self::zero();
        }
        let mut terms = HashMap::new();
        terms.insert(Monomial::one(), c);
        Self {
            terms,
            degree: 0,
            num_vars: 0,
        }
    }
    /// Create single variable polynomial x_i
    pub fn var(i: usize) -> Self {
        let mut terms = HashMap::new();
        terms.insert(Monomial::var(i), 1.0);
        Self {
            terms,
            degree: 1,
            num_vars: i + 1,
        }
    }
    /// Create a polynomial from a list of terms.
    ///
    /// Like monomials are summed; terms that are or cancel to (near) zero
    /// are removed before the degree/variable caches are computed.
    pub fn from_terms(term_list: Vec<Term>) -> Self {
        let mut terms = HashMap::new();
        for term in term_list {
            // Ignore numerically-zero input coefficients up front.
            if term.coeff.abs() < Self::EPS {
                continue;
            }
            *terms.entry(term.monomial).or_insert(0.0) += term.coeff;
        }
        Self::from_map(terms)
    }
    /// Total degree
    pub fn degree(&self) -> usize {
        self.degree
    }
    /// Number of variables (max variable index + 1)
    pub fn num_variables(&self) -> usize {
        self.num_vars
    }
    /// Number of terms
    pub fn num_terms(&self) -> usize {
        self.terms.len()
    }
    /// Is zero polynomial?
    pub fn is_zero(&self) -> bool {
        self.terms.is_empty()
    }
    /// Get coefficient of monomial (0.0 if absent)
    pub fn coeff(&self, m: &Monomial) -> f64 {
        *self.terms.get(m).unwrap_or(&0.0)
    }
    /// Get all terms (iteration order is unspecified: HashMap-backed)
    pub fn terms(&self) -> impl Iterator<Item = (&Monomial, &f64)> {
        self.terms.iter()
    }
    /// Evaluate at point
    pub fn eval(&self, x: &[f64]) -> f64 {
        self.terms.iter().map(|(m, &c)| c * m.eval(x)).sum()
    }
    /// Add two polynomials
    pub fn add(&self, other: &Polynomial) -> Polynomial {
        let mut terms = self.terms.clone();
        for (m, &c) in &other.terms {
            *terms.entry(m.clone()).or_insert(0.0) += c;
        }
        Self::from_map(terms)
    }
    /// Subtract polynomials
    pub fn sub(&self, other: &Polynomial) -> Polynomial {
        self.add(&other.neg())
    }
    /// Negate polynomial (degree and variable count are unchanged)
    pub fn neg(&self) -> Polynomial {
        Polynomial {
            terms: self.terms.iter().map(|(m, &c)| (m.clone(), -c)).collect(),
            degree: self.degree,
            num_vars: self.num_vars,
        }
    }
    /// Multiply by scalar
    pub fn scale(&self, s: f64) -> Polynomial {
        if s.abs() < Self::EPS {
            return Polynomial::zero();
        }
        // Nonzero scalar: all terms survive, so caches carry over.
        Polynomial {
            terms: self
                .terms
                .iter()
                .map(|(m, &c)| (m.clone(), s * c))
                .collect(),
            degree: self.degree,
            num_vars: self.num_vars,
        }
    }
    /// Multiply two polynomials (distributes term-by-term)
    pub fn mul(&self, other: &Polynomial) -> Polynomial {
        let mut terms = HashMap::new();
        for (m1, &c1) in &self.terms {
            for (m2, &c2) in &other.terms {
                let m = m1.mul(m2);
                *terms.entry(m).or_insert(0.0) += c1 * c2;
            }
        }
        Self::from_map(terms)
    }
    /// Square polynomial
    pub fn square(&self) -> Polynomial {
        self.mul(self)
    }
    /// Power (by repeated multiplication)
    pub fn pow(&self, n: usize) -> Polynomial {
        if n == 0 {
            return Polynomial::constant(1.0);
        }
        if n == 1 {
            return self.clone();
        }
        let mut result = self.clone();
        for _ in 1..n {
            result = result.mul(self);
        }
        result
    }
    /// Generate all monomials of total degree ≤ `max_degree` in
    /// `num_vars` variables, sorted by (degree, powers), duplicates
    /// removed. Always contains the constant monomial.
    pub fn monomials_up_to_degree(num_vars: usize, max_degree: usize) -> Vec<Monomial> {
        let mut result = vec![Monomial::one()];
        if max_degree == 0 || num_vars == 0 {
            return result;
        }
        // Enumerate every assignment of powers to variables var..num_vars
        // with total degree at most `remaining_degree`.
        fn generate(
            var: usize,
            num_vars: usize,
            remaining_degree: usize,
            current: Vec<(usize, usize)>,
            result: &mut Vec<Monomial>,
        ) {
            if var >= num_vars {
                result.push(Monomial::new(current));
                return;
            }
            for p in 0..=remaining_degree {
                let mut next = current.clone();
                if p > 0 {
                    next.push((var, p));
                }
                generate(var + 1, num_vars, remaining_degree - p, next, result);
            }
        }
        for d in 1..=max_degree {
            generate(0, num_vars, d, vec![], &mut result);
        }
        // Sort by (degree, powers), then dedup: equal monomials — including
        // the many copies of the constant produced by the enumeration —
        // become adjacent, so `dedup` removes them all. (The previous
        // extra constant-deduplication pass was dead code.)
        result.sort_by(|a, b| {
            a.degree()
                .cmp(&b.degree())
                .then_with(|| a.powers.cmp(&b.powers))
        });
        result.dedup();
        result
    }
}
impl std::fmt::Display for Polynomial {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if self.terms.is_empty() {
return write!(f, "0");
}
let mut sorted: Vec<_> = self.terms.iter().collect();
sorted.sort_by(|a, b| {
a.0.degree()
.cmp(&b.0.degree())
.then_with(|| a.0.powers.cmp(&b.0.powers))
});
let parts: Vec<String> = sorted
.iter()
.map(|(m, &c)| {
if m.is_constant() {
format!("{:.4}", c)
} else if (c - 1.0).abs() < 1e-10 {
format!("{}", m)
} else if (c + 1.0).abs() < 1e-10 {
format!("-{}", m)
} else {
format!("{:.4}*{}", c, m)
}
})
.collect();
write!(f, "{}", parts.join(" + "))
}
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_monomial() {
        // x0 * x1 merges into a single sorted monomial of degree 2.
        let m1 = Monomial::var(0);
        let m2 = Monomial::var(1);
        let m3 = m1.mul(&m2);
        assert_eq!(m3.degree(), 2);
        assert_eq!(m3.powers, vec![(0, 1), (1, 1)]);
    }
    #[test]
    fn test_polynomial_eval() {
        // p = x² + 2xy + y² = (x + y)², checked at two points
        let p = Polynomial::from_terms(vec![
            Term::new(1.0, vec![(0, 2)]),
            Term::new(2.0, vec![(0, 1), (1, 1)]),
            Term::new(1.0, vec![(1, 2)]),
        ]);
        // At (1, 1): 1 + 2 + 1 = 4
        assert!((p.eval(&[1.0, 1.0]) - 4.0).abs() < 1e-10);
        // At (2, 3): 4 + 12 + 9 = 25 = (2+3)²
        assert!((p.eval(&[2.0, 3.0]) - 25.0).abs() < 1e-10);
    }
    #[test]
    fn test_polynomial_mul() {
        // (x + y)² = x² + 2xy + y² — check each expanded coefficient
        let x = Polynomial::var(0);
        let y = Polynomial::var(1);
        let sum = x.add(&y);
        let squared = sum.square();
        assert!((squared.coeff(&Monomial::new(vec![(0, 2)])) - 1.0).abs() < 1e-10);
        assert!((squared.coeff(&Monomial::new(vec![(0, 1), (1, 1)])) - 2.0).abs() < 1e-10);
        assert!((squared.coeff(&Monomial::new(vec![(1, 2)])) - 1.0).abs() < 1e-10);
    }
    #[test]
    fn test_monomials_generation() {
        let monoms = Polynomial::monomials_up_to_degree(2, 2);
        // Should have: 1, x0, x1, x0², x0*x1, x1²
        assert!(monoms.len() >= 6);
    }
}

View File

@@ -0,0 +1,322 @@
//! Semidefinite Programming (SDP)
//!
//! Simple SDP solver for SOS certificates.
/// SDP problem in standard form
/// minimize: trace(C * X)
/// subject to: trace(A_i * X) = b_i, X ≽ 0
///
/// All matrices are stored row-major as flat `Vec<f64>` of length n².
#[derive(Debug, Clone)]
pub struct SDPProblem {
    /// Matrix dimension
    pub n: usize,
    /// Objective matrix C (n × n)
    pub c: Vec<f64>,
    /// Constraint matrices A_i
    pub constraints: Vec<Vec<f64>>,
    /// Constraint right-hand sides b_i
    pub b: Vec<f64>,
}
impl SDPProblem {
    /// New problem over n×n matrices with a zero objective and no
    /// constraints.
    pub fn new(n: usize) -> Self {
        let size = n * n;
        Self {
            n,
            c: vec![0.0; size],
            constraints: Vec::new(),
            b: Vec::new(),
        }
    }
    /// Replace the objective matrix C.
    ///
    /// Panics if `c` does not have exactly n² entries.
    pub fn set_objective(&mut self, c: Vec<f64>) {
        assert_eq!(c.len(), self.n * self.n);
        self.c = c;
    }
    /// Append one equality constraint trace(A·X) = bi.
    ///
    /// Panics if `a` does not have exactly n² entries.
    pub fn add_constraint(&mut self, a: Vec<f64>, bi: f64) {
        assert_eq!(a.len(), self.n * self.n);
        self.constraints.push(a);
        self.b.push(bi);
    }
    /// Number of equality constraints added so far.
    pub fn num_constraints(&self) -> usize {
        self.constraints.len()
    }
}
/// SDP solution
#[derive(Debug, Clone)]
pub struct SDPSolution {
    /// Optimal X matrix (row-major, n × n)
    pub x: Vec<f64>,
    /// Objective value trace(C * X) at the returned X
    pub value: f64,
    /// Solver status
    pub status: SDPStatus,
    /// Number of iterations actually performed
    pub iterations: usize,
}
/// Solver status
#[derive(Debug, Clone, PartialEq)]
pub enum SDPStatus {
    /// Converged: all equality constraints satisfied within tolerance.
    Optimal,
    /// Problem declared infeasible (not currently produced by `SDPSolver::solve`).
    Infeasible,
    /// Problem declared unbounded (not currently produced by `SDPSolver::solve`).
    Unbounded,
    /// Iteration budget exhausted before convergence.
    MaxIterations,
    /// Numerical failure (not currently produced by `SDPSolver::solve`).
    NumericalError,
}
/// Simple projected gradient SDP solver
pub struct SDPSolver {
/// Maximum iterations
pub max_iters: usize,
/// Tolerance
pub tolerance: f64,
/// Step size
pub step_size: f64,
}
impl SDPSolver {
/// Create with default parameters
pub fn new() -> Self {
Self {
max_iters: 1000,
tolerance: 1e-6,
step_size: 0.01,
}
}
/// Solve SDP problem
pub fn solve(&self, problem: &SDPProblem) -> SDPSolution {
let n = problem.n;
let m = problem.num_constraints();
if n == 0 {
return SDPSolution {
x: vec![],
value: 0.0,
status: SDPStatus::Optimal,
iterations: 0,
};
}
// Initialize X as identity
let mut x = vec![0.0; n * n];
for i in 0..n {
x[i * n + i] = 1.0;
}
// Simple augmented Lagrangian method
let mut dual = vec![0.0; m];
let rho = 1.0;
for iter in 0..self.max_iters {
// Compute gradient of Lagrangian
let mut grad = problem.c.clone();
for (j, (a, &d)) in problem.constraints.iter().zip(dual.iter()).enumerate() {
let ax: f64 = (0..n * n).map(|k| a[k] * x[k]).sum();
let residual = ax - problem.b[j];
// Gradient contribution from constraint
for k in 0..n * n {
grad[k] += (d + rho * residual) * a[k];
}
}
// Gradient descent step
for k in 0..n * n {
x[k] -= self.step_size * grad[k];
}
// Project onto PSD cone
self.project_psd(&mut x, n);
// Update dual variables
let mut max_violation = 0.0f64;
for (j, a) in problem.constraints.iter().enumerate() {
let ax: f64 = (0..n * n).map(|k| a[k] * x[k]).sum();
let residual = ax - problem.b[j];
dual[j] += rho * residual;
max_violation = max_violation.max(residual.abs());
}
// Check convergence
if max_violation < self.tolerance {
let value: f64 = (0..n * n).map(|k| problem.c[k] * x[k]).sum();
return SDPSolution {
x,
value,
status: SDPStatus::Optimal,
iterations: iter + 1,
};
}
}
let value: f64 = (0..n * n).map(|k| problem.c[k] * x[k]).sum();
SDPSolution {
x,
value,
status: SDPStatus::MaxIterations,
iterations: self.max_iters,
}
}
/// Project matrix onto PSD cone via eigendecomposition
fn project_psd(&self, x: &mut [f64], n: usize) {
// Symmetrize first
for i in 0..n {
for j in i + 1..n {
let avg = (x[i * n + j] + x[j * n + i]) / 2.0;
x[i * n + j] = avg;
x[j * n + i] = avg;
}
}
// For small matrices, use power iteration to find and remove negative eigencomponents
// This is a simplified approach
if n <= 10 {
self.project_psd_small(x, n);
} else {
// For larger matrices, just ensure diagonal dominance
for i in 0..n {
let mut row_sum = 0.0;
for j in 0..n {
if i != j {
row_sum += x[i * n + j].abs();
}
}
x[i * n + i] = x[i * n + i].max(row_sum + 0.01);
}
}
}
fn project_psd_small(&self, x: &mut [f64], n: usize) {
// Simple approach: ensure minimum eigenvalue is non-negative
// by adding αI where α makes smallest eigenvalue ≥ 0
// Estimate smallest eigenvalue via power iteration on -X + λ_max I
let mut v: Vec<f64> = (0..n).map(|i| 1.0 / (n as f64).sqrt()).collect();
// First get largest eigenvalue estimate
let mut lambda_max = 0.0;
for _ in 0..20 {
let mut y = vec![0.0; n];
for i in 0..n {
for j in 0..n {
y[i] += x[i * n + j] * v[j];
}
}
let norm: f64 = y.iter().map(|&yi| yi * yi).sum::<f64>().sqrt();
lambda_max = v.iter().zip(y.iter()).map(|(&vi, &yi)| vi * yi).sum();
if norm > 1e-15 {
for i in 0..n {
v[i] = y[i] / norm;
}
}
}
// Now find smallest eigenvalue using shifted power iteration
let shift = lambda_max.abs() + 1.0;
let mut v: Vec<f64> = (0..n).map(|i| 1.0 / (n as f64).sqrt()).collect();
let mut lambda_min = 0.0;
for _ in 0..20 {
let mut y = vec![0.0; n];
for i in 0..n {
for j in 0..n {
let val = if i == j {
shift - x[i * n + j]
} else {
-x[i * n + j]
};
y[i] += val * v[j];
}
}
let norm: f64 = y.iter().map(|&yi| yi * yi).sum::<f64>().sqrt();
let lambda_shifted: f64 = v.iter().zip(y.iter()).map(|(&vi, &yi)| vi * yi).sum();
lambda_min = shift - lambda_shifted;
if norm > 1e-15 {
for i in 0..n {
v[i] = y[i] / norm;
}
}
}
// If smallest eigenvalue is negative, shift matrix
if lambda_min < 0.0 {
let alpha = -lambda_min + 0.01;
for i in 0..n {
x[i * n + i] += alpha;
}
}
}
}
impl Default for SDPSolver {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // Assertions accept MaxIterations because the solver is a heuristic
    // first-order method and may not converge within the iteration budget.
    #[test]
    fn test_sdp_simple() {
        // Minimize trace(X) subject to X_{11} = 1, X ≽ 0
        let mut problem = SDPProblem::new(2);
        // Objective: trace(X) = X_{00} + X_{11}
        let mut c = vec![0.0; 4];
        c[0] = 1.0; // X_{00}
        c[3] = 1.0; // X_{11}
        problem.set_objective(c);
        // Constraint: X_{00} = 1
        let mut a = vec![0.0; 4];
        a[0] = 1.0;
        problem.add_constraint(a, 1.0);
        let solver = SDPSolver::new();
        let solution = solver.solve(&problem);
        // Should find X_{00} = 1, X_{11} close to 0 (or whatever makes X PSD)
        assert!(
            solution.status == SDPStatus::Optimal || solution.status == SDPStatus::MaxIterations
        );
    }
    #[test]
    fn test_sdp_feasibility() {
        // Feasibility: find X ≽ 0 with X_{00} = 1, X_{11} = 1
        let mut problem = SDPProblem::new(2);
        // Zero objective
        problem.set_objective(vec![0.0; 4]);
        // X_{00} = 1
        let mut a1 = vec![0.0; 4];
        a1[0] = 1.0;
        problem.add_constraint(a1, 1.0);
        // X_{11} = 1
        let mut a2 = vec![0.0; 4];
        a2[3] = 1.0;
        problem.add_constraint(a2, 1.0);
        let solver = SDPSolver::new();
        let solution = solver.solve(&problem);
        // Check constraints approximately satisfied (row-major 2×2 layout)
        let x00 = solution.x[0];
        let x11 = solution.x[3];
        assert!((x00 - 1.0).abs() < 0.1 || solution.status == SDPStatus::MaxIterations);
        assert!((x11 - 1.0).abs() < 0.1 || solution.status == SDPStatus::MaxIterations);
    }
}

View File

@@ -0,0 +1,463 @@
//! Sum-of-Squares Decomposition
//!
//! Check if a polynomial can be written as a sum of squared polynomials.
use super::polynomial::{Monomial, Polynomial, Term};
/// SOS decomposition configuration
#[derive(Debug, Clone)]
pub struct SOSConfig {
    /// Maximum iterations for the Gram-matrix search
    pub max_iters: usize,
    /// Convergence tolerance on coefficient-matching error
    pub tolerance: f64,
    /// Regularization parameter: floor applied to Gram-matrix diagonals
    pub regularization: f64,
}
impl Default for SOSConfig {
    fn default() -> Self {
        Self {
            max_iters: 100,
            tolerance: 1e-8,
            regularization: 1e-6,
        }
    }
}
/// Result of SOS decomposition
#[derive(Debug, Clone)]
pub enum SOSResult {
    /// Polynomial is SOS with given decomposition
    IsSOS(SOSDecomposition),
    /// Could not verify SOS (may or may not be SOS) — the checker is a
    /// heuristic, so this is a common outcome
    Unknown,
    /// Polynomial is definitely not SOS: `witness` is a point where it
    /// evaluates negative
    NotSOS { witness: Vec<f64> },
}
/// SOS decomposition: p = Σ q_i²
#[derive(Debug, Clone)]
pub struct SOSDecomposition {
    /// The squared polynomials q_i
    pub squares: Vec<Polynomial>,
    /// Gram matrix Q (row-major) such that p = v^T Q v where v is the monomial basis
    pub gram_matrix: Vec<f64>,
    /// Monomial basis used
    pub basis: Vec<Monomial>,
}
impl SOSDecomposition {
    /// Verify the decomposition against `original`: Σ q_i² must reproduce
    /// every coefficient of `original` within `tol`, and must not introduce
    /// extra terms of magnitude above `tol`.
    pub fn verify(&self, original: &Polynomial, tol: f64) -> bool {
        let reconstructed = self.reconstruct();
        // Every original coefficient must be matched within tolerance.
        let matches_original = original
            .terms()
            .all(|(m, &c)| (c - reconstructed.coeff(m)).abs() <= tol);
        if !matches_original {
            return false;
        }
        // No spurious terms: anything sizeable in the reconstruction must
        // also be present in the original.
        reconstructed
            .terms()
            .all(|(m, &c)| c.abs() <= tol || original.coeff(m).abs() >= tol)
    }
    /// Rebuild Σ q_i² from the stored square polynomials.
    pub fn reconstruct(&self) -> Polynomial {
        self.squares
            .iter()
            .fold(Polynomial::zero(), |acc, q| acc.add(&q.square()))
    }
    /// Trivial lower bound: a sum of squares is everywhere ≥ 0.
    pub fn lower_bound(&self) -> f64 {
        0.0
    }
}
/// SOS checker/decomposer
///
/// Heuristic Gram-matrix search: to show p is SOS, look for a positive
/// semidefinite Q with p = v^T Q v over a monomial basis v of degree
/// ≤ deg(p)/2, then Cholesky-factor Q to read off the squares.
pub struct SOSChecker {
    // Solver parameters (iteration budget, tolerance, regularization)
    config: SOSConfig,
}
impl SOSChecker {
    /// Create with config
    pub fn new(config: SOSConfig) -> Self {
        Self { config }
    }
    /// Create with defaults
    // NOTE(review): inherent `default` shadows the `Default` trait
    // convention; consider `impl Default for SOSChecker` instead.
    pub fn default() -> Self {
        Self::new(SOSConfig::default())
    }
    /// Check if polynomial is SOS and find decomposition.
    ///
    /// Outcomes: `IsSOS` with a decomposition, `NotSOS` with a grid point
    /// where p < 0, or `Unknown` when the heuristic search is
    /// inconclusive (a failed Gram search does NOT disprove SOS-ness).
    pub fn check(&self, p: &Polynomial) -> SOSResult {
        let degree = p.degree();
        if degree == 0 {
            // Constant polynomial: SOS iff the constant is non-negative,
            // with the trivial decomposition (√c)².
            let c = p.eval(&[]);
            if c >= 0.0 {
                return SOSResult::IsSOS(SOSDecomposition {
                    squares: vec![Polynomial::constant(c.sqrt())],
                    gram_matrix: vec![c],
                    basis: vec![Monomial::one()],
                });
            } else {
                return SOSResult::NotSOS { witness: vec![] };
            }
        }
        if degree % 2 == 1 {
            // Odd degree polynomials cannot be SOS (go to -∞)
            // Try to find a witness
            let witness = self.find_negative_witness(p);
            if let Some(w) = witness {
                return SOSResult::NotSOS { witness: w };
            }
            // No witness found on the search grid — stay inconclusive.
            return SOSResult::Unknown;
        }
        // Build SOS program
        let half_degree = degree / 2;
        let num_vars = p.num_variables();
        // Monomial basis for degree ≤ half_degree
        let basis = Polynomial::monomials_up_to_degree(num_vars, half_degree);
        let n = basis.len();
        if n == 0 {
            return SOSResult::Unknown;
        }
        // Try to find Gram matrix Q such that p = v^T Q v
        // where v is the monomial basis vector
        match self.find_gram_matrix(p, &basis) {
            Some(gram) => {
                // Check if Gram matrix is PSD
                if self.is_psd(&gram, n) {
                    let squares = self.extract_squares(&gram, &basis, n);
                    SOSResult::IsSOS(SOSDecomposition {
                        squares,
                        gram_matrix: gram,
                        basis,
                    })
                } else {
                    // A non-PSD Gram matrix is not a proof of non-SOS-ness
                    // (a different Gram matrix might work).
                    SOSResult::Unknown
                }
            }
            None => {
                // Try to find witness that p < 0
                let witness = self.find_negative_witness(p);
                if let Some(w) = witness {
                    SOSResult::NotSOS { witness: w }
                } else {
                    SOSResult::Unknown
                }
            }
        }
    }
    /// Find Gram matrix via moment matching
    fn find_gram_matrix(&self, p: &Polynomial, basis: &[Monomial]) -> Option<Vec<f64>> {
        let n = basis.len();
        // Build mapping from monomial to coefficient constraint
        // p = Σ_{i,j} Q[i,j] * (basis[i] * basis[j])
        // So for each monomial m in p, we need:
        // coeff(m) = Σ_{i,j: basis[i]*basis[j] = m} Q[i,j]
        // For simplicity, use a direct approach for small cases
        // and iterative refinement for larger ones
        if n <= 10 {
            return self.find_gram_direct(p, basis);
        }
        self.find_gram_iterative(p, basis)
    }
    /// Direct Gram matrix construction for small cases.
    ///
    /// Gradient-style iteration: start from a scaled identity, repeatedly
    /// compare the reconstruction v^T Q v against p coefficient-by-
    /// coefficient, nudge entries toward the target, re-symmetrize and
    /// floor the diagonal. Returns `None` if the coefficient error never
    /// drops below tolerance within the iteration budget.
    fn find_gram_direct(&self, p: &Polynomial, basis: &[Monomial]) -> Option<Vec<f64>> {
        let n = basis.len();
        // Start with identity scaled by constant term
        let c0 = p.coeff(&Monomial::one());
        let scale = (c0.abs() + 1.0) / n as f64;
        let mut gram = vec![0.0; n * n];
        for i in 0..n {
            gram[i * n + i] = scale;
        }
        // Iteratively adjust to match polynomial coefficients
        for _ in 0..self.config.max_iters {
            // Compute current reconstruction: monomial → Σ Q[i,j] over
            // all (i, j) with basis[i]·basis[j] equal to that monomial.
            let mut recon_terms = std::collections::HashMap::new();
            for i in 0..n {
                for j in 0..n {
                    let m = basis[i].mul(&basis[j]);
                    *recon_terms.entry(m).or_insert(0.0) += gram[i * n + j];
                }
            }
            // Compute error (worst coefficient mismatch against p)
            let mut max_err = 0.0f64;
            for (m, &c_target) in p.terms() {
                let c_current = *recon_terms.get(m).unwrap_or(&0.0);
                max_err = max_err.max((c_target - c_current).abs());
            }
            if max_err < self.config.tolerance {
                return Some(gram);
            }
            // Gradient step to reduce error; the per-entry error is split
            // across all (i', j') pairs producing the same monomial.
            let step = 0.1;
            for i in 0..n {
                for j in 0..n {
                    let m = basis[i].mul(&basis[j]);
                    let c_target = p.coeff(&m);
                    let c_current = *recon_terms.get(&m).unwrap_or(&0.0);
                    let err = c_target - c_current;
                    // Count how many (i',j') pairs produce this monomial
                    let count = self.count_pairs(&basis, &m);
                    if count > 0 {
                        gram[i * n + j] += step * err / count as f64;
                    }
                }
            }
            // Project to symmetric
            for i in 0..n {
                for j in i + 1..n {
                    let avg = (gram[i * n + j] + gram[j * n + i]) / 2.0;
                    gram[i * n + j] = avg;
                    gram[j * n + i] = avg;
                }
            }
            // Regularize diagonal (keeps the iterate away from singularity)
            for i in 0..n {
                gram[i * n + i] = gram[i * n + i].max(self.config.regularization);
            }
        }
        None
    }
    // Placeholder for a future scalable path; currently identical to the
    // direct search.
    fn find_gram_iterative(&self, p: &Polynomial, basis: &[Monomial]) -> Option<Vec<f64>> {
        // Same as direct but with larger step budget
        self.find_gram_direct(p, basis)
    }
    // Number of (i, j) basis pairs whose product equals `target`; used to
    // distribute the coefficient error evenly in the gradient step.
    fn count_pairs(&self, basis: &[Monomial], target: &Monomial) -> usize {
        let n = basis.len();
        let mut count = 0;
        for i in 0..n {
            for j in 0..n {
                if basis[i].mul(&basis[j]) == *target {
                    count += 1;
                }
            }
        }
        count
    }
    /// Check if matrix is positive semidefinite via Cholesky.
    ///
    /// A pivot more negative than -tolerance means a negative eigenvalue;
    /// slightly-negative pivots are clamped to 0 so numerically-PSD
    /// matrices pass.
    fn is_psd(&self, gram: &[f64], n: usize) -> bool {
        // Simple check: try Cholesky decomposition
        let mut l = vec![0.0; n * n];
        for i in 0..n {
            for j in 0..=i {
                let mut sum = gram[i * n + j];
                for k in 0..j {
                    sum -= l[i * n + k] * l[j * n + k];
                }
                if i == j {
                    if sum < -self.config.tolerance {
                        return false;
                    }
                    l[i * n + j] = sum.max(0.0).sqrt();
                } else {
                    let ljj = l[j * n + j];
                    l[i * n + j] = if ljj > self.config.tolerance {
                        sum / ljj
                    } else {
                        0.0
                    };
                }
            }
        }
        true
    }
    /// Extract square polynomials from Gram matrix via Cholesky:
    /// G = L·L^T gives p = v^T G v = Σ_j (L^T v)_j², so each column of L
    /// yields one square q_j = Σ_i L[i,j]·basis[i].
    fn extract_squares(&self, gram: &[f64], basis: &[Monomial], n: usize) -> Vec<Polynomial> {
        // Cholesky: G = L L^T
        let mut l = vec![0.0; n * n];
        for i in 0..n {
            for j in 0..=i {
                let mut sum = gram[i * n + j];
                for k in 0..j {
                    sum -= l[i * n + k] * l[j * n + k];
                }
                if i == j {
                    l[i * n + j] = sum.max(0.0).sqrt();
                } else {
                    let ljj = l[j * n + j];
                    l[i * n + j] = if ljj > 1e-15 { sum / ljj } else { 0.0 };
                }
            }
        }
        // Each column of L gives a polynomial q_j = Σ_i L[i,j] * basis[i]
        let mut squares = Vec::new();
        for j in 0..n {
            let terms: Vec<Term> = (0..n)
                .filter(|&i| l[i * n + j].abs() > 1e-15)
                .map(|i| Term {
                    coeff: l[i * n + j],
                    monomial: basis[i].clone(),
                })
                .collect();
            if !terms.is_empty() {
                squares.push(Polynomial::from_terms(terms));
            }
        }
        squares
    }
    /// Try to find a point where polynomial is negative.
    ///
    /// Exhaustive search over a fixed 7-point grid per variable (7^n
    /// evaluations); returns the first point with p < -1e-10, or `None`
    /// — which does NOT prove non-negativity.
    fn find_negative_witness(&self, p: &Polynomial) -> Option<Vec<f64>> {
        let n = p.num_variables().max(1);
        // Grid search
        let grid = [-2.0, -1.0, -0.5, 0.0, 0.5, 1.0, 2.0];
        // Depth-first enumeration of all grid points in n dimensions.
        fn recurse(
            p: &Polynomial,
            current: &mut Vec<f64>,
            depth: usize,
            n: usize,
            grid: &[f64],
        ) -> Option<Vec<f64>> {
            if depth == n {
                if p.eval(current) < -1e-10 {
                    return Some(current.clone());
                }
                return None;
            }
            for &v in grid {
                current.push(v);
                if let Some(w) = recurse(p, current, depth + 1, n, grid) {
                    return Some(w);
                }
                current.pop();
            }
            None
        }
        let mut current = Vec::new();
        recurse(p, &mut current, 0, n, &grid)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // The checker is heuristic: tests accept Unknown where the solver may
    // fail to converge, but never accept a wrong definite answer.
    #[test]
    fn test_constant_sos() {
        let p = Polynomial::constant(4.0);
        let checker = SOSChecker::default();
        match checker.check(&p) {
            SOSResult::IsSOS(decomp) => {
                assert!(decomp.verify(&p, 1e-6));
            }
            _ => panic!("4.0 should be SOS"),
        }
    }
    #[test]
    fn test_negative_constant_not_sos() {
        let p = Polynomial::constant(-1.0);
        let checker = SOSChecker::default();
        match checker.check(&p) {
            SOSResult::NotSOS { .. } => {}
            _ => panic!("-1.0 should not be SOS"),
        }
    }
    #[test]
    fn test_square_is_sos() {
        // (x + y)² = x² + 2xy + y² is SOS
        let x = Polynomial::var(0);
        let y = Polynomial::var(1);
        let p = x.add(&y).square();
        let checker = SOSChecker::default();
        match checker.check(&p) {
            SOSResult::IsSOS(decomp) => {
                // Verify reconstruction at a few sample points (loose
                // tolerance: the Gram search is approximate)
                let recon = decomp.reconstruct();
                for pt in [vec![1.0, 1.0], vec![2.0, -1.0], vec![0.0, 3.0]] {
                    let diff = (p.eval(&pt) - recon.eval(&pt)).abs();
                    assert!(diff < 1.0, "Reconstruction error too large: {}", diff);
                }
            }
            SOSResult::Unknown => {
                // Simplified solver may not always converge
                // But polynomial should be non-negative at sample points
                for pt in [vec![1.0, 1.0], vec![2.0, -1.0], vec![0.0, 3.0]] {
                    assert!(p.eval(&pt) >= 0.0, "(x+y)² should be >= 0");
                }
            }
            SOSResult::NotSOS { witness } => {
                // Should not find counterexample for a true SOS polynomial
                panic!(
                    "(x+y)² incorrectly marked as not SOS with witness {:?}",
                    witness
                );
            }
        }
    }
    #[test]
    fn test_x_squared_plus_one() {
        // x² + 1 is SOS (strictly positive everywhere)
        let x = Polynomial::var(0);
        let p = x.square().add(&Polynomial::constant(1.0));
        let checker = SOSChecker::default();
        match checker.check(&p) {
            SOSResult::IsSOS(_) => {}
            SOSResult::Unknown => {} // Acceptable if solver didn't converge
            SOSResult::NotSOS { .. } => panic!("x² + 1 should be SOS"),
        }
    }
}