diff --git a/Cargo.lock b/Cargo.lock index 1328062e..52657962 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -197,6 +197,13 @@ version = "0.2.186" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68ab91017fe16c622486840e4c83c9a37afeff978bd239b5293d61ece587de66" +[[package]] +name = "list-tsp" +version = "0.0.0" +dependencies = [ + "solverforge", +] + [[package]] name = "lock_api" version = "0.4.14" @@ -238,6 +245,20 @@ dependencies = [ "windows-sys", ] +[[package]] +name = "mixed-job-shop" +version = "0.0.0" +dependencies = [ + "solverforge", +] + +[[package]] +name = "nqueens" +version = "0.0.0" +dependencies = [ + "solverforge", +] + [[package]] name = "nu-ansi-term" version = "0.50.3" @@ -420,6 +441,13 @@ version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" +[[package]] +name = "scalar-graph-coloring" +version = "0.0.0" +dependencies = [ + "solverforge", +] + [[package]] name = "scopeguard" version = "1.2.0" diff --git a/Cargo.toml b/Cargo.toml index b11c0f56..9ce262d5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,6 +10,10 @@ members = [ "crates/solverforge", "crates/solverforge-cvrp", "crates/solverforge-test", + "examples/scalar-graph-coloring", + "examples/list-tsp", + "examples/mixed-job-shop", + "examples/nqueens", ] [workspace.package] diff --git a/Makefile b/Makefile index e3a065b6..f154b2e7 100644 --- a/Makefile +++ b/Makefile @@ -63,7 +63,7 @@ examples: banner @printf "$(CYAN)$(BOLD)╔══════════════════════════════════════╗$(RESET)\n" @printf "$(CYAN)$(BOLD)║ Building Examples ║$(RESET)\n" @printf "$(CYAN)$(BOLD)╚══════════════════════════════════════╝$(RESET)\n\n" - @for ex in nqueens employee-scheduling vehicle-routing; do \ + @for ex in scalar-graph-coloring list-tsp mixed-job-shop nqueens; do \ printf "$(PROGRESS) Building $$ex...\n"; \ cargo build -p $$ex --quiet && \ printf "$(GREEN)$(CHECK) Built $$ex$(RESET)\n" 
|| \ diff --git a/README.md b/README.md index e70f78b5..c2d3a81a 100644 --- a/README.md +++ b/README.md @@ -74,7 +74,7 @@ Current public naming follows neutral Rust contracts rather than `Typed*` prefix ## Features - **Score Types**: SoftScore, HardSoftScore, HardMediumSoftScore, BendableScore, HardSoftDecimalScore -- **ConstraintStream API**: Declarative constraints with fluent builders, source-aware generated streams, single-source and cross-join projected scoring rows, existence checks, joins, grouping, and balance/complemented streams +- **ConstraintStream API**: Declarative constraints with fluent builders, model-owned collection sources, single-source and cross-join projected scoring rows, existence checks, joins, grouping, and balance/complemented streams - **SERIO Engine**: Scoring Engine for Real-time Incremental Optimization - **Solver Phases**: - Generic Construction Heuristics (`FirstFit`, `CheapestInsertion`) over one mixed scalar/list `ModelContext` when matching list work is present, plus descriptor-scalar construction routing for pure scalar targets and specialized list phases (`ListRoundRobin`, `ListCheapestInsertion`, `ListRegretInsertion`, `ListClarkeWright`, `ListKOpt`) @@ -226,18 +226,29 @@ order; they do not consume construction order keys. ### 2. Define Constraints -The `#[planning_solution]` macro generates a `ScheduleConstraintStreams` trait with typed accessors for each collection field, so `factory.shifts()` replaces manual `for_each` extractors: +The `#[planning_solution]` macro generates source functions on the solution type +for each collection field. 
Use those functions with `ConstraintFactory::for_each(...)` +so constraints stay tied to the model-owned field instead of to a generated trait +import: ```rust -use solverforge::{ConstraintSet, HardSoftScore}; -use crate::domain::ScheduleConstraintStreams; // generated by #[planning_solution] +use solverforge::prelude::*; use solverforge::stream::{joiner::*, ConstraintFactory}; +use crate::domain::{Employee, Schedule, Shift}; + fn define_constraints() -> impl ConstraintSet { + let unassigned = ConstraintFactory::::new() + .for_each(Schedule::shifts()) + .unassigned() + .penalize_hard() + .named("Unassigned shift"); + let required_skill = ConstraintFactory::::new() - .shifts() + .for_each(Schedule::shifts()) .join(( - ConstraintFactory::::new().employees(), + ConstraintFactory::::new() + .for_each(Schedule::employees()), equal_bi( |shift: &Shift| shift.employee, |emp: &Employee| Some(emp.id), @@ -250,7 +261,7 @@ fn define_constraints() -> impl ConstraintSet { .named("Required skill"); let no_overlap = ConstraintFactory::::new() - .shifts() + .for_each(Schedule::shifts()) .join(equal(|shift: &Shift| shift.employee)) .filter(|a: &Shift, b: &Shift| { a.employee.is_some() && a.start < b.end && b.start < a.end @@ -258,7 +269,7 @@ fn define_constraints() -> impl ConstraintSet { .penalize_hard() .named("No overlap"); - (required_skill, no_overlap) + (unassigned, required_skill, no_overlap) } ``` @@ -275,9 +286,9 @@ struct AssignedShift { } let assigned_overlaps = ConstraintFactory::::new() - .shifts() + .for_each(Schedule::shifts()) .join(( - ConstraintFactory::::new().employees(), + ConstraintFactory::::new().for_each(Schedule::employees()), equal_bi(|shift: &Shift| shift.employee, |emp: &Employee| Some(emp.id)), )) .project(|shift: &Shift, employee: &Employee| AssignedShift { @@ -582,11 +593,13 @@ for constraint in &analysis.constraints { ## Examples -See the [`examples/`](examples/) directory: - -- **N-Queens**: Classic constraint satisfaction problem +Root workspace 
examples live under [`examples/`](examples/) as complete solver +packages: ```bash +cargo run -p scalar-graph-coloring +cargo run -p list-tsp +cargo run -p mixed-job-shop cargo run -p nqueens ``` @@ -685,16 +698,16 @@ Typical throughput: 300k-1M moves/second depending on constraint complexity for - Release notes are managed in `CHANGELOG.md` by commit-and-tag workflow. - **Modern CLI templates**: The standalone CLI introduced first-class application scaffolds around the retained `SolverManager` + `Solvable` + `solver.toml` API. The current CLI has since consolidated those starters behind the neutral `solverforge new ...` shell plus `solverforge generate ...` domain shaping. No manual solver loops, no sub-crate imports — only the `solverforge` facade crate. -- **Generated domain accessors**: `#[planning_solution]` generates a `{Name}ConstraintStreams` trait with typed `.field_name()` methods on `ConstraintFactory` — e.g., `factory.shifts()` instead of `factory.for_each(|s| &s.shifts)` +- **Generated model sources**: `#[planning_solution]` generates inherent source functions such as `Schedule::shifts()` and `Schedule::employees()` for use with `ConstraintFactory::for_each(...)` - **Ergonomic extractors**: `CollectionExtract` trait accepts both `|s| s.field.as_slice()` and `|s| &s.field` (via `vec(|s| &s.field)`) — no forced `.as_slice()` at every call site -- **Generated `.unassigned()` filter**: entities with `Option` planning variables get a `{Entity}UnassignedFilter` trait — e.g., `factory.shifts().unassigned()` filters to unassigned entities -- **Projected scoring rows**: generated accessors support `.project(...)` with named bounded projection types, creating scoring-only rows without materialized facts. 
+- **Generated `.unassigned()` support**: entities with exactly one `Option` planning variable can call `.unassigned()` on streams created from model sources +- **Projected scoring rows**: model source streams support `.project(...)` with named bounded projection types, creating scoring-only rows without materialized facts. - **Convenience scoring**: `penalize_hard()`, `penalize_soft()`, `reward_hard()`, `reward_soft()` on all stream types - **Single `.join(target)`**: one join method dispatching on argument type — `equal(|a| key)` for self-join, `(extractor_b, equal_bi(ka, kb))` for keyed cross-join, `(other_stream, |a, b| pred)` for predicate join - **`.named("name")`**: sole finalization method on all builders (replaces `as_constraint`) - **Score trait**: `one_hard()`, `one_soft()`, `one_medium()` default methods - **Joiners**: `equal`, `equal_bi`, `less_than`, `less_than_or_equal`, `greater_than`, `greater_than_or_equal`, `overlapping`, `filtering`, with `.and()` composition -- **Conditional existence**: `if_exists(...)`, `if_not_exists(...)` over generated/source-aware collection targets, including flattened collection existence for nested list membership +- **Conditional existence**: `if_exists(...)`, `if_not_exists(...)` over model-owned collection targets, including flattened collection existence for nested list membership ### What's New in 0.5.15 diff --git a/crates/solverforge-macros/WIREFRAME.md b/crates/solverforge-macros/WIREFRAME.md index 17b52786..4caeab67 100644 --- a/crates/solverforge-macros/WIREFRAME.md +++ b/crates/solverforge-macros/WIREFRAME.md @@ -110,7 +110,7 @@ Applies to structs. Adds derives: `Clone, Debug, PartialEq, Eq, ProblemFactImpl` - Hidden scalar metadata bridge: private indexed helpers for scalar variable count, name, allows-unassigned, value-source metadata, getter/setter, and entity-local value slices. 
Helper order matches `entity_descriptor()` genuine scalar variable order; the index is used for generated getter/setter dispatch, while manifest hook attachment resolves descriptor variables by descriptor index plus variable name. - Hidden list metadata bridge (when the entity has a `#[planning_list_variable]` field): public cross-module `__SOLVERFORGE_LIST_VARIABLE_COUNT` plus private `__SOLVERFORGE_LIST_VARIABLE_NAME`, `__SOLVERFORGE_LIST_ELEMENT_COLLECTION`, `__solverforge_list_field()`, `__solverforge_list_field_mut()`, `__solverforge_list_metadata()` - Hidden typed list bridge (when the entity has a `#[planning_list_variable]` field): `impl __internal::ListVariableEntity for Entity` -- `pub trait {Entity}UnassignedFilter<...>` (when the entity has exactly one `Option<_>` planning variable) — `.unassigned()` on `UniConstraintStream<_, Entity, ...>`, including generated accessor streams from `#[planning_solution]` +- Hidden unassigned bridge (when the entity has exactly one `Option<_>` planning variable): `impl __internal::UnassignedEntity for Entity`, enabling `.unassigned()` on `UniConstraintStream<_, Entity, ...>` without a generated public trait import ### `PlanningSolutionImpl` @@ -155,7 +155,7 @@ Applies to structs. 
Adds derives: `Clone, Debug, PartialEq, Eq, ProblemFactImpl` - `impl Solvable for T` (when constraints path specified) — `solve(self, runtime: SolverRuntime)` delegates to `solve_internal()` - `impl Analyzable for T` (when constraints path specified) — `analyze()` creates `ScoreDirector` with canonical shadow support and returns `ScoreAnalysis` - `fn solve_internal(self, runtime: SolverRuntime)` (when constraints path specified) — calls `run_solver()` for macro-generated solving, or loads `solver.toml` and passes it through the configured `config = "..."` callback before calling `run_solver_with_config()`; generated runtime helpers build one `ModelContext` containing typed scalar contexts plus zero or more owner-specific list contexts, delegate scalar hook attachment to the `planning_model!` support impl, sort those variable contexts to the descriptor-backed variable order emitted by the macros, compute hidden shape-aware solve-start telemetry (`__solverforge_total_list_elements()` for list models and `__solverforge_scalar_candidate_count()` for scalar models), and then call hidden `build_phases(config, &descriptor, &model)` -- `pub trait {Name}ConstraintStreams` — accessor methods for all `#[planning_entity_collection]` and `#[problem_fact_collection]` fields; implemented on `ConstraintFactory<{Name}, Sc>`. Each accessor returns a `UniConstraintStream` backed by `SourceExtract &[Item]>`, using `ChangeSource::Descriptor(idx)` for planning entities and `ChangeSource::Static` for problem facts so generated streams work with incremental `.if_exists(...)` / `.if_not_exists(...)`, `.project(...)`, and `.unassigned()`. There is still only one public `for_each` entrypoint. +- Public solution source methods for all `#[planning_entity_collection]`, `#[problem_fact_collection]`, and streamable `#[planning_list_element_collection]` fields. 
Each method is inherent on the solution type, for example `Plan::tasks()`, returns `impl solverforge::stream::CollectionExtract`, and carries hidden `ChangeSource::Descriptor(idx)` for planning entities or `ChangeSource::Static` for facts and list elements. User constraints call `ConstraintFactory::new().for_each(Plan::tasks())`; there is still only one public stream-entry verb. ### `ProblemFactImpl` @@ -193,7 +193,7 @@ Applies to structs. Adds derives: `Clone, Debug, PartialEq, Eq, ProblemFactImpl` | `generate_list_operations` | `fn(&Fields) -> TokenStream` | Generates the private runtime helper family, public owner-scoped list methods, and guarded single-owner generic methods without relying on bare-name metadata lookup | | `generate_solvable_solution` | `fn(&Ident, &Option) -> TokenStream` | Generates SolvableSolution/Solvable/Analyzable impls | | `generate_shadow_support` | `fn(&ShadowConfig, &Fields, &Ident) -> Result` | Generates `PlanningSolution` shadow method overrides | -| `generate_constraint_stream_extensions` | `fn(&Fields, &Ident) -> TokenStream` | Generates `{Name}ConstraintStreams` trait + impl on ConstraintFactory | +| `generate_collection_source_methods` | `fn(&Fields) -> TokenStream` | Generates inherent solution source methods used with ConstraintFactory::for_each | | `extract_option_inner_type` | `fn(&Type) -> Result<&Type, Error>` | Extracts `T` from `Option` | | `extract_collection_inner_type` | `fn(&Type) -> Option<&Type>` | Extracts `T` from `Vec` | diff --git a/crates/solverforge-macros/src/planning_entity/expand.rs b/crates/solverforge-macros/src/planning_entity/expand.rs index fe522d73..d04a103f 100644 --- a/crates/solverforge-macros/src/planning_entity/expand.rs +++ b/crates/solverforge-macros/src/planning_entity/expand.rs @@ -1,6 +1,6 @@ use proc_macro2::TokenStream; use quote::quote; -use syn::{Data, DeriveInput, Error, Fields}; +use syn::{parse_quote, Data, DeriveInput, Error, Fields}; use crate::attr_parse::{ 
attribute_argument_names, get_attribute, has_attribute, has_attribute_argument, diff --git a/crates/solverforge-macros/src/planning_entity/expand/derive.rs b/crates/solverforge-macros/src/planning_entity/expand/derive.rs index 9facba2c..91b39e0d 100644 --- a/crates/solverforge-macros/src/planning_entity/expand/derive.rs +++ b/crates/solverforge-macros/src/planning_entity/expand/derive.rs @@ -271,69 +271,25 @@ pub(crate) fn expand_derive(input: DeriveInput) -> Result { let unassigned_filter_extension = if optional_planning_variables.len() == 1 { let (field_name, field_type) = optional_planning_variables[0]; - let predicate_name = syn::Ident::new( - &format!( - "__{}_{}_unassigned", - name.to_string().to_lowercase(), - field_name - ), - proc_macro2::Span::call_site(), - ); - let filter_trait_name = syn::Ident::new( - &format!("{}UnassignedFilter", name), - proc_macro2::Span::call_site(), - ); + let mut unassigned_generics = generics.clone(); + unassigned_generics + .params + .push(parse_quote!(__SolverForgeSolution)); + unassigned_generics + .make_where_clause() + .predicates + .push(parse_quote!(__SolverForgeSolution: ::solverforge::__internal::PlanningSolution)); + let (unassigned_impl_generics, _, unassigned_where_clause) = + unassigned_generics.split_for_impl(); quote! 
{ - #[allow(non_snake_case)] - fn #predicate_name( - _solution: &Solution, - entity: &#name, - ) -> bool - where - Solution: ::solverforge::__internal::PlanningSolution, + impl #unassigned_impl_generics ::solverforge::__internal::UnassignedEntity<__SolverForgeSolution> + for #name #ty_generics + #unassigned_where_clause { - let value: &::core::option::Option<#field_type> = &entity.#field_name; - value.is_none() - } - - pub trait #filter_trait_name { - type Output; - fn unassigned(self) -> Self::Output; - } - - impl #filter_trait_name - for ::solverforge::__internal::UniConstraintStream - where - Sc: ::solverforge::Score + 'static, - Solution: ::solverforge::__internal::PlanningSolution, - E: ::solverforge::__internal::CollectionExtract, - F: ::solverforge::__internal::UniFilter, - { - type Output = ::solverforge::__internal::UniConstraintStream< - Solution, - #name, - E, - ::solverforge::__internal::AndUniFilter< - F, - ::solverforge::__internal::FnUniFilter< - fn(&Solution, &#name) -> bool - >, - >, - Sc, - >; - - fn unassigned(self) -> Self::Output { - let (extractor, filter) = self.into_parts(); - ::solverforge::__internal::UniConstraintStream::from_parts( - extractor, - ::solverforge::__internal::AndUniFilter::new( - filter, - ::solverforge::__internal::FnUniFilter::new( - #predicate_name:: as fn(&Solution, &#name) -> bool - ), - ), - ) + fn is_unassigned(_solution: &__SolverForgeSolution, entity: &Self) -> bool { + let value: &::core::option::Option<#field_type> = &entity.#field_name; + value.is_none() } } } diff --git a/crates/solverforge-macros/src/planning_entity_tests.rs b/crates/solverforge-macros/src/planning_entity_tests.rs index 85f0d3ed..ff34f723 100644 --- a/crates/solverforge-macros/src/planning_entity_tests.rs +++ b/crates/solverforge-macros/src/planning_entity_tests.rs @@ -32,8 +32,8 @@ fn golden_entity_expansion_includes_descriptor_and_planning_id() { assert!(expanded.contains("const HAS_LIST_VARIABLE : bool = true")); 
assert!(expanded.contains("LIST_ELEMENT_SOURCE")); assert!(expanded.contains("fn __solverforge_list_metadata < Solution >")); - assert!(expanded.contains("pub trait TaskUnassignedFilter")); - assert!(expanded.contains("fn unassigned (self)")); + assert!(expanded.contains("UnassignedEntity < __SolverForgeSolution > for Task")); + assert!(expanded.contains("fn is_unassigned")); } #[test] diff --git a/crates/solverforge-macros/src/planning_solution/expand.rs b/crates/solverforge-macros/src/planning_solution/expand.rs index 7c7b32c8..a0ae1997 100644 --- a/crates/solverforge-macros/src/planning_solution/expand.rs +++ b/crates/solverforge-macros/src/planning_solution/expand.rs @@ -13,7 +13,7 @@ use super::runtime::{ generate_runtime_phase_support, generate_runtime_solve_internal, generate_solvable_solution, }; use super::shadow::generate_shadow_support; -use super::stream_extensions::generate_constraint_stream_extensions; +use super::stream_extensions::generate_collection_source_methods; use super::type_helpers::{extract_collection_inner_type, extract_option_inner_type}; pub(crate) fn expand_derive(input: DeriveInput) -> Result { @@ -169,7 +169,7 @@ pub(crate) fn expand_derive(input: DeriveInput) -> Result { generate_runtime_solve_internal(&constraints_path, &config_path, &solver_toml_path); let solvable_solution_impl = generate_solvable_solution(name, &constraints_path); - let stream_extensions = generate_constraint_stream_extensions(fields, name); + let collection_source_methods = generate_collection_source_methods(fields); let expanded = quote! 
{ impl #impl_generics ::solverforge::__internal::PlanningSolution for #name #ty_generics #where_clause { @@ -206,6 +206,7 @@ pub(crate) fn expand_derive(input: DeriveInput) -> Result { } #(#collection_accessors)* + #collection_source_methods #list_operations #runtime_solve_internal @@ -213,8 +214,6 @@ pub(crate) fn expand_derive(input: DeriveInput) -> Result { #runtime_phase_support #solvable_solution_impl - - #stream_extensions }; Ok(expanded) diff --git a/crates/solverforge-macros/src/planning_solution/stream_extensions.rs b/crates/solverforge-macros/src/planning_solution/stream_extensions.rs index 072d3683..4b91b6a9 100644 --- a/crates/solverforge-macros/src/planning_solution/stream_extensions.rs +++ b/crates/solverforge-macros/src/planning_solution/stream_extensions.rs @@ -1,14 +1,11 @@ +use crate::attr_parse::has_attribute; use proc_macro2::TokenStream; use quote::quote; -use syn::Ident; - -use crate::attr_parse::has_attribute; use super::type_helpers::extract_collection_inner_type; -pub(super) fn generate_constraint_stream_extensions( +pub(super) fn generate_collection_source_methods( fields: &syn::punctuated::Punctuated, - solution_name: &Ident, ) -> TokenStream { let entity_fields: Vec<_> = fields .iter() @@ -20,8 +17,12 @@ pub(super) fn generate_constraint_stream_extensions( .filter(|f| has_attribute(&f.attrs, "problem_fact_collection")) .collect(); - let mut accessor_methods: Vec = Vec::new(); - let mut accessor_impls: Vec = Vec::new(); + let list_element_fields: Vec<_> = fields + .iter() + .filter(|f| has_attribute(&f.attrs, "planning_list_element_collection")) + .collect(); + + let mut source_methods: Vec = Vec::new(); for (descriptor_index, f) in entity_fields.iter().enumerate() { let field_name = match f.ident.as_ref() { @@ -34,27 +35,12 @@ pub(super) fn generate_constraint_stream_extensions( }; let descriptor_index_lit = syn::Index::from(descriptor_index); - accessor_methods.push(quote! 
{ - fn #field_name(self) -> ::solverforge::__internal::UniConstraintStream< - #solution_name, - #element_type, - ::solverforge::__internal::SourceExtract &[#element_type]>, - ::solverforge::__internal::TrueFilter, - Sc>; - }); - - accessor_impls.push(quote! { - fn #field_name(self) -> ::solverforge::__internal::UniConstraintStream< - #solution_name, - #element_type, - ::solverforge::__internal::SourceExtract &[#element_type]>, - ::solverforge::__internal::TrueFilter, - Sc> - { - self.for_each(::solverforge::__internal::source( - (|s: &#solution_name| s.#field_name.as_slice()) as fn(&#solution_name) -> &[#element_type], + source_methods.push(quote! { + pub fn #field_name() -> impl ::solverforge::stream::CollectionExtract { + ::solverforge::__internal::source( + (|s: &Self| s.#field_name.as_slice()) as fn(&Self) -> &[#element_type], ::solverforge::__internal::ChangeSource::Descriptor(#descriptor_index_lit), - )) + ) } }); } @@ -69,49 +55,35 @@ pub(super) fn generate_constraint_stream_extensions( None => continue, }; - accessor_methods.push(quote! { - fn #field_name(self) -> ::solverforge::__internal::UniConstraintStream< - #solution_name, - #element_type, - ::solverforge::__internal::SourceExtract &[#element_type]>, - ::solverforge::__internal::TrueFilter, - Sc>; - }); - - accessor_impls.push(quote! { - fn #field_name(self) -> ::solverforge::__internal::UniConstraintStream< - #solution_name, - #element_type, - ::solverforge::__internal::SourceExtract &[#element_type]>, - ::solverforge::__internal::TrueFilter, - Sc> - { - self.for_each(::solverforge::__internal::source( - (|s: &#solution_name| s.#field_name.as_slice()) as fn(&#solution_name) -> &[#element_type], + source_methods.push(quote! 
{ + pub fn #field_name() -> impl ::solverforge::stream::CollectionExtract { + ::solverforge::__internal::source( + (|s: &Self| s.#field_name.as_slice()) as fn(&Self) -> &[#element_type], ::solverforge::__internal::ChangeSource::Static, - )) + ) } }); } - if accessor_methods.is_empty() { - return TokenStream::new(); - } - - let trait_name = Ident::new( - &format!("{}ConstraintStreams", solution_name), - proc_macro2::Span::call_site(), - ); - - quote! { - pub trait #trait_name { - #(#accessor_methods)* - } + for f in list_element_fields.iter() { + let field_name = match f.ident.as_ref() { + Some(n) => n, + None => continue, + }; + let element_type = match extract_collection_inner_type(&f.ty) { + Some(t) => t, + None => continue, + }; - impl #trait_name - for ::solverforge::stream::ConstraintFactory<#solution_name, Sc> - { - #(#accessor_impls)* - } + source_methods.push(quote! { + pub fn #field_name() -> impl ::solverforge::stream::CollectionExtract { + ::solverforge::__internal::source( + (|s: &Self| s.#field_name.as_slice()) as fn(&Self) -> &[#element_type], + ::solverforge::__internal::ChangeSource::Static, + ) + } + }); } + + quote! { #(#source_methods)* } } diff --git a/crates/solverforge-macros/src/planning_solution_tests.rs b/crates/solverforge-macros/src/planning_solution_tests.rs index b4ad9c2c..b2eb2281 100644 --- a/crates/solverforge-macros/src/planning_solution_tests.rs +++ b/crates/solverforge-macros/src/planning_solution_tests.rs @@ -2,7 +2,7 @@ use crate::planning_solution::expand_derive; use syn::parse_quote; #[test] -fn golden_solution_expansion_emits_constraint_streams_and_descriptor() { +fn golden_solution_expansion_emits_model_sources_and_descriptor() { let input = parse_quote! 
{ #[solverforge_constraints_path = "crate::constraints::create_constraints"] struct Plan { @@ -20,7 +20,12 @@ fn golden_solution_expansion_emits_constraint_streams_and_descriptor() { .to_string(); assert!(expanded.contains("impl :: solverforge :: __internal :: PlanningSolution for Plan")); - assert!(expanded.contains("pub trait PlanConstraintStreams")); + assert!(expanded + .contains("pub fn workers () -> impl :: solverforge :: stream :: CollectionExtract < Self , Item = Worker >")); + assert!(expanded + .contains("pub fn tasks () -> impl :: solverforge :: stream :: CollectionExtract < Self , Item = Task >")); + assert!(expanded.contains("ChangeSource :: Static")); + assert!(expanded.contains("ChangeSource :: Descriptor (0)")); assert!(expanded .contains("pub fn descriptor () -> :: solverforge :: __internal :: SolutionDescriptor")); assert!(expanded.contains("pub fn __solverforge_entity_tasks")); diff --git a/crates/solverforge-macros/tests/ui/pass/chained_scalar_metadata/domain/plan.rs b/crates/solverforge-macros/tests/ui/pass/chained_scalar_metadata/domain/plan.rs index 7b2aeb0e..78750ba9 100644 --- a/crates/solverforge-macros/tests/ui/pass/chained_scalar_metadata/domain/plan.rs +++ b/crates/solverforge-macros/tests/ui/pass/chained_scalar_metadata/domain/plan.rs @@ -16,10 +16,8 @@ pub struct Plan { } fn constraints() -> impl ConstraintSet { - use PlanConstraintStreams; - (ConstraintFactory::::new() - .tasks() + .for_each(Plan::tasks()) .penalize_soft() .named("penalize_tasks"),) } diff --git a/crates/solverforge-macros/tests/ui/pass/fixtures/scalar_multi_module_plan.rs b/crates/solverforge-macros/tests/ui/pass/fixtures/scalar_multi_module_plan.rs index 651e591e..2b1fb4b6 100644 --- a/crates/solverforge-macros/tests/ui/pass/fixtures/scalar_multi_module_plan.rs +++ b/crates/solverforge-macros/tests/ui/pass/fixtures/scalar_multi_module_plan.rs @@ -16,11 +16,9 @@ pub struct Plan { } fn constraints() -> impl ConstraintSet { - use PlanConstraintStreams; - ( 
ConstraintFactory::::new() - .tasks() + .for_each(Plan::tasks()) .penalize_soft() .named("penalize_tasks"), ) diff --git a/crates/solverforge-macros/tests/ui/pass/private_scalar_hooks/domain/plan.rs b/crates/solverforge-macros/tests/ui/pass/private_scalar_hooks/domain/plan.rs index 9d5e274f..60eb603e 100644 --- a/crates/solverforge-macros/tests/ui/pass/private_scalar_hooks/domain/plan.rs +++ b/crates/solverforge-macros/tests/ui/pass/private_scalar_hooks/domain/plan.rs @@ -16,10 +16,8 @@ pub struct Plan { } fn constraints() -> impl ConstraintSet { - use PlanConstraintStreams; - (ConstraintFactory::::new() - .tasks() + .for_each(Plan::tasks()) .filter(|task: &Task| task.worker.is_none()) .penalize(HardSoftScore::of(0, 1)) .named("unassigned task"),) diff --git a/crates/solverforge-macros/tests/ui/pass/qualified_manifest_attrs/domain/plan.rs b/crates/solverforge-macros/tests/ui/pass/qualified_manifest_attrs/domain/plan.rs index 88eac02a..12aa2a9f 100644 --- a/crates/solverforge-macros/tests/ui/pass/qualified_manifest_attrs/domain/plan.rs +++ b/crates/solverforge-macros/tests/ui/pass/qualified_manifest_attrs/domain/plan.rs @@ -16,10 +16,8 @@ pub struct Plan { } fn constraints() -> impl ConstraintSet { - use PlanConstraintStreams; - (ConstraintFactory::::new() - .tasks() + .for_each(Plan::tasks()) .penalize_soft() .named("penalize_tasks"),) } diff --git a/crates/solverforge-macros/tests/ui/pass/scalar_multi_module/domain/plan.rs b/crates/solverforge-macros/tests/ui/pass/scalar_multi_module/domain/plan.rs index edea81a8..4c7d79e7 100644 --- a/crates/solverforge-macros/tests/ui/pass/scalar_multi_module/domain/plan.rs +++ b/crates/solverforge-macros/tests/ui/pass/scalar_multi_module/domain/plan.rs @@ -16,11 +16,9 @@ pub struct Plan { } fn constraints() -> impl ConstraintSet { - use PlanConstraintStreams; - ( ConstraintFactory::::new() - .tasks() + .for_each(Plan::tasks()) .penalize_soft() .named("penalize_tasks"), ) diff --git 
a/crates/solverforge-macros/tests/ui/pass/standard_solution/domain/plan.rs b/crates/solverforge-macros/tests/ui/pass/standard_solution/domain/plan.rs index edea81a8..4c7d79e7 100644 --- a/crates/solverforge-macros/tests/ui/pass/standard_solution/domain/plan.rs +++ b/crates/solverforge-macros/tests/ui/pass/standard_solution/domain/plan.rs @@ -16,11 +16,9 @@ pub struct Plan { } fn constraints() -> impl ConstraintSet { - use PlanConstraintStreams; - ( ConstraintFactory::::new() - .tasks() + .for_each(Plan::tasks()) .penalize_soft() .named("penalize_tasks"), ) diff --git a/crates/solverforge-scoring/WIREFRAME.md b/crates/solverforge-scoring/WIREFRAME.md index c58fa9f9..54a3ae2b 100644 --- a/crates/solverforge-scoring/WIREFRAME.md +++ b/crates/solverforge-scoring/WIREFRAME.md @@ -121,7 +121,8 @@ src/ │ ├── projected_stream/source/filtered.rs — Row-level filtered projected source │ ├── projected_stream/source/merged.rs — Merged projected sources with source-slot offsets │ ├── projected_stream/source/joined.rs — Cross-join `.project(...)` projected source -│ ├── collection_extract.rs — CollectionExtract trait, source-aware extractors, VecExtract wrapper, vec() constructor +│ ├── collection_extract.rs — CollectionExtract trait, hidden source metadata, VecExtract wrapper, vec() constructor +│ ├── unassigned.rs — Hidden UnassignedEntity hook and `.unassigned()` stream method │ ├── join_target.rs — JoinTarget trait + 3 impls (self-join, keyed cross-join, predicate cross-join) │ ├── key_extract.rs — KeyExtract trait, EntityKeyAdapter struct │ ├── arity_stream_macros/ @@ -403,11 +404,11 @@ Constraints own their `ConstraintRef` once. Metadata and analysis types borrow t **`ConstraintFactory`** — Entry point. - `new()`, `for_each()` → `UniConstraintStream` -- Generated domain accessors call the same `for_each()` with hidden descriptor/static source metadata. +- Generated solution source methods pass `for_each()` hidden descriptor/static source metadata. 
**`UniConstraintStream`** — Single collection stream. -- Operations: `filter()`, `join(target)` (single dispatch via `JoinTarget`), `group_by()`, `balance()`, `project(projection)` → `ProjectedConstraintStream`, `flattened(flatten)` → `FlattenedCollectionTarget`, `if_exists(target)`, `if_not_exists(target)`, `penalize()`, `penalize_with()`, `penalize_hard_with()`, `penalize_hard()`, `penalize_soft()`, `reward()`, `reward_with()`, `reward_hard_with()`, `reward_hard()`, `reward_soft()` -- Unfiltered `UniConstraintStream<..., TrueFilter, ...>` implements `CollectionExtract` by delegating to its source extractor. This lets keyed cross-join targets use generated/source-aware streams directly, preserving `ChangeSource` metadata. +- Operations: `filter()`, `unassigned()` when the entity implements hidden `UnassignedEntity`, `join(target)` (single dispatch via `JoinTarget`), `group_by()`, `balance()`, `project(projection)` → `ProjectedConstraintStream`, `flattened(flatten)` → `FlattenedCollectionTarget`, `if_exists(target)`, `if_not_exists(target)`, `penalize()`, `penalize_with()`, `penalize_hard_with()`, `penalize_hard()`, `penalize_soft()`, `reward()`, `reward_with()`, `reward_hard_with()`, `reward_hard()`, `reward_soft()` +- Unfiltered `UniConstraintStream<..., TrueFilter, ...>` implements `CollectionExtract` by delegating to its source extractor. This lets keyed cross-join targets use generated model sources directly, preserving hidden source metadata. 
- `join()` dispatch: `equal(|a| key)` → self-join `BiConstraintStream`; `(extractor_b, equal_bi(ka, kb))` → keyed `CrossBiConstraintStream`; `(other_stream, |a, b| pred)` → predicate `CrossBiConstraintStream` - `into_parts()` → `(E, F)`, `from_parts(extractor, filter)` → `Self`, `extractor()` → `&E` @@ -444,7 +445,9 @@ impl Projection for AssignmentLoadEntries { } } -factory.assignments().project(AssignmentLoadEntries) +ConstraintFactory::::new() + .for_each(Plan::assignments()) + .project(AssignmentLoadEntries) ``` **`ProjectedGroupedConstraintStream` / `ProjectedGroupedConstraintBuilder`** — Grouped projected rows using stock collectors such as `sum()` and `count()`. Grouped retained state uses the same `ProjectedRowOwner` ownership index as ungrouped projected rows. Collector values do not need `Clone`; retained grouped state stores the projected row once by `ProjectedRowCoordinate` and recomputes key/value on retract. `named()` → `ProjectedGroupedConstraint`. @@ -495,9 +498,11 @@ factory.for_each(vec(|s: &Schedule| &s.employees)) .join((vec(|s: &Schedule| &s.employees), equal_bi(...))) ``` -**`ChangeSource`** — Enum describing whether a stream source can localize descriptor-owned incremental callbacks: `Unknown`, `Static`, or `Descriptor(idx)`. `Descriptor(idx)` owns localized events for that descriptor. `Static` never localizes. `Unknown` is non-localized metadata for raw/manual extraction: it is valid for `evaluate()` and `initialize()`, but localized `on_insert(...)` / `on_retract(...)` callbacks panic because the entity index cannot be safely mapped to a source. +**`CollectionExtract`** — Public low-level source contract accepted by `ConstraintFactory::for_each(...)`. Macro-generated solution source functions return `impl CollectionExtract` so users do not import generated helper traits. -**`SourceExtract` / `source(...)`** — Descriptor-aware collection extraction used by generated accessors and structured source-aware streams. 
Planning entity collections carry `ChangeSource::Descriptor(idx)`; static fact collections carry `ChangeSource::Static`. Raw `for_each` closure extractors use `ChangeSource::Unknown`; wrap extractors with `source(..., ChangeSource::Descriptor(idx))` when they must participate in localized incremental mutation callbacks. +**`ChangeSource`** — Hidden enum describing whether a stream source can localize descriptor-owned incremental callbacks: `Unknown`, `Static`, or `Descriptor(idx)`. `Descriptor(idx)` owns localized events for that descriptor. `Static` never localizes. `Unknown` is non-localized metadata for raw/manual extraction: it is valid for `evaluate()` and `initialize()`, but localized `on_insert(...)` / `on_retract(...)` callbacks panic because the entity index cannot be safely mapped to a source. + +**`SourceExtract` / `source(...)`** — Hidden descriptor-aware collection extraction used by macro-generated solution source methods. Planning entity collections carry `ChangeSource::Descriptor(idx)`; static fact and list-element collections carry `ChangeSource::Static`. These symbols are not part of the facade stream workflow. **`FlattenExtract

`** — Trait for flattening a parent entity into a child slice for existence filtering. Blanket impl for `Fn(&P) -> &[B] + Send + Sync`; `FlattenVecExtract` adapts `Fn(&P) -> &Vec<B>`. diff --git a/crates/solverforge-scoring/src/stream/collection_extract.rs b/crates/solverforge-scoring/src/stream/collection_extract.rs index 7dc7d005..2edac5ce 100644 --- a/crates/solverforge-scoring/src/stream/collection_extract.rs +++ b/crates/solverforge-scoring/src/stream/collection_extract.rs @@ -34,13 +34,14 @@ pub trait CollectionExtract<S>: Send + Sync { fn extract<'s>(&self, s: &'s S) -> &'s [Self::Item]; // Identifies whether the solution source owns descriptor-scoped localized updates. - // Plain extractors are non-localized; wrap them in `source(..., Descriptor(idx))` - // when they must receive localized mutation callbacks. + // Plain extractors are non-localized. Macro-generated solution source methods + // attach descriptor/static metadata through hidden internal support. fn change_source(&self) -> ChangeSource { ChangeSource::Unknown } } +#[doc(hidden)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum ChangeSource { Unknown, @@ -132,6 +133,7 @@ where } } +#[doc(hidden)] #[derive(Clone, Copy)] pub struct SourceExtract<E> { extractor: E, @@ -210,6 +212,7 @@ where VecExtract(f) } +#[doc(hidden)] pub fn source<E>(extractor: E, change_source: ChangeSource) -> SourceExtract<E> { SourceExtract::new(extractor, change_source) } diff --git a/crates/solverforge-scoring/src/stream/factory.rs index fe47851c..4979de57 100644 --- a/crates/solverforge-scoring/src/stream/factory.rs +++ b/crates/solverforge-scoring/src/stream/factory.rs @@ -56,11 +56,12 @@ where } } - /* Creates a zero-erasure uni-constraint stream over a collection source. - The extractor function receives a reference to the solution and returns - a slice of entities to iterate over.
The extractor type is preserved - as a concrete generic for full zero-erasure. + For macro-generated models, pass the inherent solution source method: + `ConstraintFactory::new().for_each(Schedule::shifts())`. + Low-level callers can still pass any `CollectionExtract`. + The extractor type is preserved as a concrete generic for full zero-erasure. */ pub fn for_each(self, extractor: E) -> UniConstraintStream where diff --git a/crates/solverforge-scoring/src/stream/mod.rs b/crates/solverforge-scoring/src/stream/mod.rs index f2788358..ef19697c 100644 --- a/crates/solverforge-scoring/src/stream/mod.rs +++ b/crates/solverforge-scoring/src/stream/mod.rs @@ -82,6 +82,7 @@ mod penta_stream; mod projected_stream; mod quad_stream; mod tri_stream; +mod unassigned; mod uni_stream; mod weighting_support; @@ -108,4 +109,6 @@ pub use projected_stream::{ }; pub use quad_stream::{QuadConstraintBuilder, QuadConstraintStream}; pub use tri_stream::{TriConstraintBuilder, TriConstraintStream}; +#[doc(hidden)] +pub use unassigned::UnassignedEntity; pub use uni_stream::{UniConstraintBuilder, UniConstraintStream}; diff --git a/crates/solverforge-scoring/src/stream/unassigned.rs b/crates/solverforge-scoring/src/stream/unassigned.rs new file mode 100644 index 00000000..1aa49e57 --- /dev/null +++ b/crates/solverforge-scoring/src/stream/unassigned.rs @@ -0,0 +1,32 @@ +use solverforge_core::score::Score; + +use super::collection_extract::CollectionExtract; +use super::filter::{AndUniFilter, FnUniFilter, UniFilter}; +use super::UniConstraintStream; + +#[doc(hidden)] +pub trait UnassignedEntity: Clone + Send + Sync + 'static { + fn is_unassigned(solution: &S, entity: &Self) -> bool; +} + +impl UniConstraintStream +where + S: Send + Sync + 'static, + A: UnassignedEntity, + E: CollectionExtract, + F: UniFilter, + Sc: Score + 'static, +{ + pub fn unassigned( + self, + ) -> UniConstraintStream bool>>, Sc> { + let (extractor, filter) = self.into_parts(); + UniConstraintStream::from_parts( + extractor, 
+ AndUniFilter::new( + filter, + FnUniFilter::new(A::is_unassigned as fn(&S, &A) -> bool), + ), + ) + } +} diff --git a/crates/solverforge/WIREFRAME.md b/crates/solverforge/WIREFRAME.md index 3a3b2bf8..05439edf 100644 --- a/crates/solverforge/WIREFRAME.md +++ b/crates/solverforge/WIREFRAME.md @@ -166,7 +166,7 @@ Re-exports the fluent constraint stream API: ```rust pub use solverforge_scoring::stream::collection_extract::vec; pub use solverforge_scoring::stream::collection_extract::{ - source, ChangeSource, CollectionExtract, FlattenExtract, SourceExtract, VecExtract, + CollectionExtract, FlattenExtract, VecExtract, }; pub use solverforge_scoring::stream::collector; pub use solverforge_scoring::stream::{joiner, ConstraintFactory, FlattenedCollectionTarget}; @@ -179,17 +179,19 @@ prelude re-exports `count`, `sum`, and `load_balance`. Extractor ergonomics: all `for_each` and join extractor params accept `CollectionExtract`. Use `|s| s.field.as_slice()` for slices, or `vec(|s| &s.field)` when the field is a `Vec` and you prefer `&field` syntax. -Generated keyed joins: unfiltered generated accessors can be passed directly as keyed join targets, preserving hidden `ChangeSource` metadata: +Model-owned keyed joins use solution source methods generated by `#[planning_solution]`, preserving hidden descriptor/static metadata: ```rust -factory.assignments().join(( - ConstraintFactory::::new().furnaces(), +ConstraintFactory::::new() + .for_each(Plan::assignments()) + .join(( + ConstraintFactory::::new().for_each(Plan::furnaces()), equal_bi(|assignment| assignment.furnace_idx(), |furnace| Some(furnace.id)), )) ``` -Generated existence ergonomics: there is one public `ConstraintFactory::for_each(...)`. Generated `{Name}ConstraintStreams` accessors call it with hidden `ChangeSource::Descriptor(idx)` / `ChangeSource::Static` metadata so localized incremental callbacks use entity indexes only for the owning planning-entity collection. 
Raw facade `for_each(...)` extractors do not carry localized source ownership. Flattened existence targets use `.flattened(...)` and `FlattenedCollectionTarget`. +Generated existence ergonomics: there is one public `ConstraintFactory::for_each(...)`. `#[planning_solution]` generates inherent source methods such as `Plan::assignments()` and `Plan::furnaces()` with hidden descriptor/static metadata so localized incremental callbacks use entity indexes only for the owning planning-entity collection. Raw facade `for_each(...)` extractors do not carry localized source ownership. Flattened existence targets use `.flattened(...)` and `FlattenedCollectionTarget`. -Projected scoring ergonomics: `factory.assignments().project(TaskShiftWorkEntries)` creates bounded scoring rows from a named `Projection` type without materializing facts or entities. Keyed cross joins use `.project(|assignment, capacity| Row { ... })` to emit one scoring row per retained joined pair. Projected streams can be filtered, self-joined, merged, grouped, and weighted like normal scoring state. Single-source projection implementations emit through `ProjectionSink` and declare `MAX_EMITS`; joined-pair closures do not need a helper type. Projected output rows, projected self-join keys, and grouped collector values do not need `Clone`. Projected self-join ordering is coordinate-stable by `ProjectedRowCoordinate`, not sparse storage row id. +Projected scoring ergonomics: `ConstraintFactory::new().for_each(Plan::assignments()).project(TaskShiftWorkEntries)` creates bounded scoring rows from a named `Projection` type without materializing facts or entities. Keyed cross joins use `.project(|assignment, capacity| Row { ... })` to emit one scoring row per retained joined pair. Projected streams can be filtered, self-joined, merged, grouped, and weighted like normal scoring state. 
Single-source projection implementations emit through `ProjectionSink` and declare `MAX_EMITS`; joined-pair closures do not need a helper type. Projected output rows, projected self-join keys, and grouped collector values do not need `Clone`. Projected self-join ordering is coordinate-stable by `ProjectedRowCoordinate`, not sparse storage row id. ## `__internal` Module (`#[doc(hidden)]`) @@ -230,11 +232,11 @@ construction and grouped local-search selectors. **Async bridge types:** - `tokio::sync::mpsc::UnboundedSender` -**Stream types for macro-generated extension traits (from `solverforge-scoring`):** +**Stream types for macro-generated source methods (from `solverforge-scoring`):** - `ChangeSource`, `CollectionExtract`, `SourceExtract` - `UniConstraintStream`, `UniConstraintBuilder` - `TrueFilter`, `UniFilter`, `FnUniFilter`, `AndUniFilter` -- `source` +- `source`, `UnassignedEntity` **Derive macros (from `solverforge-macros`):** - `PlanningEntityImpl`, `PlanningSolutionImpl`, `ProblemFactImpl` diff --git a/crates/solverforge/src/__internal.rs b/crates/solverforge/src/__internal.rs index b2cee6d0..f04aefb9 100644 --- a/crates/solverforge/src/__internal.rs +++ b/crates/solverforge/src/__internal.rs @@ -51,9 +51,9 @@ pub use solverforge_solver::{ // Config pub use solverforge_config::{PhaseConfig, SolverConfig}; -// Stream types needed for macro-generated extension traits +// Stream types needed for macro-generated source methods pub use solverforge_scoring::stream::filter::{AndUniFilter, FnUniFilter, TrueFilter, UniFilter}; pub use solverforge_scoring::stream::{ - source, ChangeSource, CollectionExtract, SourceExtract, UniConstraintBuilder, + source, ChangeSource, CollectionExtract, SourceExtract, UnassignedEntity, UniConstraintBuilder, UniConstraintStream, }; diff --git a/crates/solverforge/src/stream.rs b/crates/solverforge/src/stream.rs index 459416f6..2dc278ab 100644 --- a/crates/solverforge/src/stream.rs +++ b/crates/solverforge/src/stream.rs @@ -1,6 +1,6 @@ 
pub use solverforge_scoring::stream::collection_extract::vec; pub use solverforge_scoring::stream::collection_extract::{ - source, ChangeSource, CollectionExtract, FlattenExtract, SourceExtract, VecExtract, + CollectionExtract, FlattenExtract, VecExtract, }; /// Fluent constraint stream API. pub use solverforge_scoring::stream::collector; diff --git a/crates/solverforge/tests/configurable_solvable/domain/configurable/configurable_solution.rs b/crates/solverforge/tests/configurable_solvable/domain/configurable/configurable_solution.rs index ea67118d..e6e5b5fb 100644 --- a/crates/solverforge/tests/configurable_solvable/domain/configurable/configurable_solution.rs +++ b/crates/solverforge/tests/configurable_solvable/domain/configurable/configurable_solution.rs @@ -21,11 +21,9 @@ pub struct ConfigurableSolution { } fn define_constraints() -> impl ConstraintSet { - use ConfigurableSolutionConstraintStreams; - ( ConstraintFactory::::new() - .entities() + .for_each(ConfigurableSolution::entities()) .penalize_with(|_| HardSoftScore::of(0, 0)) .named("noop"), ) diff --git a/crates/solverforge/tests/configurable_solvable/domain/explicit/explicit_configurable_solution.rs b/crates/solverforge/tests/configurable_solvable/domain/explicit/explicit_configurable_solution.rs index 3cb39d61..6b513442 100644 --- a/crates/solverforge/tests/configurable_solvable/domain/explicit/explicit_configurable_solution.rs +++ b/crates/solverforge/tests/configurable_solvable/domain/explicit/explicit_configurable_solution.rs @@ -23,11 +23,9 @@ pub struct ExplicitConfigurableSolution { fn define_explicit_constraints( ) -> impl ConstraintSet { - use ExplicitConfigurableSolutionConstraintStreams; - ( ConstraintFactory::::new() - .entities() + .for_each(ExplicitConfigurableSolution::entities()) .penalize_with(|_| HardSoftScore::of(0, 0)) .named("noop"), ) diff --git a/crates/solverforge/tests/constraint_accessors.rs b/crates/solverforge/tests/constraint_accessors.rs index a9b61fbf..25515c45 100644 --- 
a/crates/solverforge/tests/constraint_accessors.rs +++ b/crates/solverforge/tests/constraint_accessors.rs @@ -2,17 +2,13 @@ // and convenience methods (penalize_hard, penalize_soft, etc.) on the Score trait. use solverforge::prelude::*; -use solverforge::stream::{source, ChangeSource, ConstraintFactory, SourceExtract}; +use solverforge::stream::{CollectionExtract, ConstraintFactory}; use solverforge::IncrementalConstraint; #[path = "constraint_accessors/domain/mod.rs"] mod domain; -use domain::{Schedule, ScheduleConstraintStreams, ShiftUnassignedFilter}; - -fn raw_shifts(schedule: &Schedule) -> &[domain::Shift] { - schedule.shifts.as_slice() -} +use domain::Schedule; #[test] fn test_one_hard_returns_correct_score() { @@ -34,20 +30,19 @@ fn test_soft_score_one_soft() { #[test] fn test_constraint_stream_accessors_compile() { - // Test that the generated .shifts() and .employees() methods exist and compile. let factory = ConstraintFactory::<Schedule, HardSoftScore>::new(); - let _shifts_stream = factory.shifts(); + let _shifts_stream = factory.for_each(Schedule::shifts()); let factory3 = ConstraintFactory::<Schedule, HardSoftScore>::new(); - let _unassigned_stream = factory3.shifts().unassigned(); + let _unassigned_stream = factory3.for_each(Schedule::shifts()).unassigned(); let factory2 = ConstraintFactory::<Schedule, HardSoftScore>::new(); - let _employees_stream = factory2.employees(); + let _employees_stream = factory2.for_each(Schedule::employees()); } #[test] fn generated_descriptor_stream_localizes_callbacks() { let mut constraint = ConstraintFactory::<Schedule, HardSoftScore>::new() - .shifts() + .for_each(Schedule::shifts()) .penalize_hard() .named("shift count"); @@ -78,11 +73,10 @@ } #[test] -fn facade_exports_source_aware_extractors() { - let extractor: SourceExtract<fn(&Schedule) -> &[domain::Shift]> = source( - raw_shifts as fn(&Schedule) -> &[domain::Shift], - ChangeSource::Descriptor(0), - ); +fn generated_model_sources_carry_descriptor_metadata() { + let extractor = Schedule::shifts(); + 
assert!(extractor.change_source().owns_descriptor(0)); + let mut constraint = ConstraintFactory::::new() .for_each(extractor) .penalize_hard() @@ -110,7 +104,10 @@ fn facade_exports_source_aware_extractors() { #[test] fn test_penalize_hard_compiles_on_stream() { let factory = ConstraintFactory::::new(); - let constraint = factory.shifts().penalize_hard().named("Test penalize hard"); + let constraint = factory + .for_each(Schedule::shifts()) + .penalize_hard() + .named("Test penalize hard"); let _ = constraint; } @@ -118,7 +115,10 @@ fn test_penalize_hard_compiles_on_stream() { #[test] fn test_penalize_soft_compiles_on_stream() { let factory = ConstraintFactory::::new(); - let constraint = factory.shifts().penalize_soft().named("Test penalize soft"); + let constraint = factory + .for_each(Schedule::shifts()) + .penalize_soft() + .named("Test penalize soft"); let _ = constraint; } @@ -126,7 +126,10 @@ fn test_penalize_soft_compiles_on_stream() { #[test] fn test_named_alias_compiles() { let factory = ConstraintFactory::::new(); - let constraint = factory.shifts().penalize_hard().named("Test named alias"); + let constraint = factory + .for_each(Schedule::shifts()) + .penalize_hard() + .named("Test named alias"); let _ = constraint; } diff --git a/crates/solverforge/tests/constraint_accessors/domain/mod.rs b/crates/solverforge/tests/constraint_accessors/domain/mod.rs index b9993c8e..a6eaa3d0 100644 --- a/crates/solverforge/tests/constraint_accessors/domain/mod.rs +++ b/crates/solverforge/tests/constraint_accessors/domain/mod.rs @@ -6,6 +6,6 @@ solverforge::planning_model! 
{ mod shift; pub use employee::Employee; - pub use schedule::{Schedule, ScheduleConstraintStreams}; - pub use shift::{Shift, ShiftUnassignedFilter}; + pub use schedule::Schedule; + pub use shift::Shift; } diff --git a/crates/solverforge/tests/list_clarke_wright_publication/domain/publication_plan.rs b/crates/solverforge/tests/list_clarke_wright_publication/domain/publication_plan.rs index 4d38f48e..a87c3bf6 100644 --- a/crates/solverforge/tests/list_clarke_wright_publication/domain/publication_plan.rs +++ b/crates/solverforge/tests/list_clarke_wright_publication/domain/publication_plan.rs @@ -49,13 +49,11 @@ impl solverforge::cvrp::VrpSolution for PublicationPlan { } fn define_constraints() -> impl ConstraintSet { - use PublicationPlanConstraintStreams; - (ConstraintFactory::::new() - .customers() + .for_each(PublicationPlan::customers()) .if_not_exists(( ConstraintFactory::::new() - .routes() + .for_each(PublicationPlan::routes()) .flattened(|route: &Route| &route.visits), equal_bi( |customer: &Customer| customer.id, diff --git a/crates/solverforge/tests/mixed_variable_order_runtime/plan.rs b/crates/solverforge/tests/mixed_variable_order_runtime/plan.rs index 0ac36055..ef9a4550 100644 --- a/crates/solverforge/tests/mixed_variable_order_runtime/plan.rs +++ b/crates/solverforge/tests/mixed_variable_order_runtime/plan.rs @@ -19,10 +19,8 @@ pub struct Plan { } fn constraints() -> impl ConstraintSet { - use PlanConstraintStreams; - (ConstraintFactory::::new() - .routes() + .for_each(Plan::routes()) .filter(|route: &Route| route.first_visit.is_none()) .penalize(HardSoftScore::of(1, 0)) .named("missing first visit"),) diff --git a/crates/solverforge/tests/projected_publication.rs b/crates/solverforge/tests/projected_publication.rs index 25befb0c..3e898ad6 100644 --- a/crates/solverforge/tests/projected_publication.rs +++ b/crates/solverforge/tests/projected_publication.rs @@ -102,14 +102,12 @@ impl Projection for CapacityEntries { #[test] fn 
projected_stream_is_public_and_infers_output_type() { - use PlanConstraintStreams; - let constraint = ConstraintFactory::::new() - .assignments() + .for_each(Plan::assignments()) .project(AssignmentEntries) .merge( ConstraintFactory::::new() - .capacities() + .for_each(Plan::capacities()) .project(CapacityEntries), ) .group_by( @@ -138,12 +136,10 @@ fn projected_stream_is_public_and_infers_output_type() { #[test] fn cross_join_project_is_public_and_infers_output_type() { - use PlanConstraintStreams; - let constraint = ConstraintFactory::::new() - .assignments() + .for_each(Plan::assignments()) .join(( - ConstraintFactory::::new().capacities(), + ConstraintFactory::::new().for_each(Plan::capacities()), joiner::equal_bi( |assignment: &Assignment| assignment.bucket, |capacity: &Capacity| capacity.bucket, diff --git a/crates/solverforge/tests/scalar_multi_module_runtime/plan.rs b/crates/solverforge/tests/scalar_multi_module_runtime/plan.rs index 668927fc..cedc1af7 100644 --- a/crates/solverforge/tests/scalar_multi_module_runtime/plan.rs +++ b/crates/solverforge/tests/scalar_multi_module_runtime/plan.rs @@ -19,10 +19,8 @@ pub struct Plan { } fn constraints() -> impl ConstraintSet { - use PlanConstraintStreams; - (ConstraintFactory::::new() - .tasks() + .for_each(Plan::tasks()) .filter(|task: &Task| task.worker.is_none()) .penalize(HardSoftScore::of(0, 1)) .named("unassigned task"),) diff --git a/crates/solverforge/tests/scalar_runtime_publication/domain/plan.rs b/crates/solverforge/tests/scalar_runtime_publication/domain/plan.rs index f0636ade..f552640b 100644 --- a/crates/solverforge/tests/scalar_runtime_publication/domain/plan.rs +++ b/crates/solverforge/tests/scalar_runtime_publication/domain/plan.rs @@ -19,10 +19,8 @@ pub struct Plan { } fn define_constraints() -> impl ConstraintSet { - use PlanConstraintStreams; - (ConstraintFactory::::new() - .tasks() + .for_each(Plan::tasks()) .filter(|task: &Task| task.resource_idx.is_none()) .penalize(HardSoftScore::of(0, 1)) 
.named("unassigned task"),) diff --git a/crates/solverforge/tests/scalar_runtime_selector_assembly/domain/plan.rs b/crates/solverforge/tests/scalar_runtime_selector_assembly/domain/plan.rs index 85f9d683..3754c323 100644 --- a/crates/solverforge/tests/scalar_runtime_selector_assembly/domain/plan.rs +++ b/crates/solverforge/tests/scalar_runtime_selector_assembly/domain/plan.rs @@ -19,10 +19,8 @@ pub struct Plan { } fn define_constraints() -> impl ConstraintSet { - use PlanConstraintStreams; - (ConstraintFactory::::new() - .tasks() + .for_each(Plan::tasks()) .penalize_with(|_| HardSoftScore::of(0, 0)) .named("noop"),) } diff --git a/docs/naming-charter.md b/docs/naming-charter.md index 0a52e778..d160f958 100644 --- a/docs/naming-charter.md +++ b/docs/naming-charter.md @@ -8,6 +8,7 @@ Since SolverForge v0.9.0, `scalar` is the only canonical opposite of `list`. - Keep established base move names when the structure is already obvious. - Use explicit contrast names when the surface would otherwise hide scalar-vs-list meaning. - Do not keep dual naming, compatibility aliases, or legacy synonyms. +- Macro-generated public names must not encode helper roles by prefix or suffix when an inherent method or stable fluent API can carry the concept. 
## Canonical names diff --git a/examples/list-tsp/Cargo.toml b/examples/list-tsp/Cargo.toml new file mode 100644 index 00000000..a3590532 --- /dev/null +++ b/examples/list-tsp/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "list-tsp" +version = "0.0.0" +edition.workspace = true +publish = false + +[dependencies] +solverforge = { workspace = true } diff --git a/examples/list-tsp/solver.toml b/examples/list-tsp/solver.toml new file mode 100644 index 00000000..8988a391 --- /dev/null +++ b/examples/list-tsp/solver.toml @@ -0,0 +1,3 @@ +[[phases]] +type = "construction_heuristic" +construction_heuristic_type = "first_fit" diff --git a/examples/list-tsp/src/domain/mod.rs b/examples/list-tsp/src/domain/mod.rs new file mode 100644 index 00000000..e131aa80 --- /dev/null +++ b/examples/list-tsp/src/domain/mod.rs @@ -0,0 +1,11 @@ +solverforge::planning_model! { + root = "examples/list-tsp/src/domain"; + + mod route; + mod tour_plan; + mod visit; + + pub use route::Route; + pub use tour_plan::TourPlan; + pub use visit::Visit; +} diff --git a/examples/list-tsp/src/domain/route.rs b/examples/list-tsp/src/domain/route.rs new file mode 100644 index 00000000..4d304ac7 --- /dev/null +++ b/examples/list-tsp/src/domain/route.rs @@ -0,0 +1,10 @@ +use solverforge::prelude::*; + +#[planning_entity] +pub struct Route { + #[planning_id] + pub id: usize, + + #[planning_list_variable(element_collection = "visit_values")] + pub visits: Vec, +} diff --git a/examples/list-tsp/src/domain/tour_plan.rs b/examples/list-tsp/src/domain/tour_plan.rs new file mode 100644 index 00000000..570ae0d0 --- /dev/null +++ b/examples/list-tsp/src/domain/tour_plan.rs @@ -0,0 +1,43 @@ +use solverforge::prelude::*; +use solverforge::stream::joiner::equal_bi; +use solverforge::stream::ConstraintFactory; + +use super::{Route, Visit}; + +#[planning_solution( + constraints = "define_constraints", + solver_toml = "../../solver.toml" +)] +pub struct TourPlan { + #[problem_fact_collection] + pub visits: Vec, + + 
#[planning_list_element_collection(owner = "routes")] + pub visit_values: Vec, + + #[planning_entity_collection] + pub routes: Vec, + + #[planning_score] + pub score: Option, +} + +fn define_constraints() -> impl ConstraintSet { + let all_visits_assigned = ConstraintFactory::::new() + .for_each(TourPlan::visits()) + .if_not_exists(( + ConstraintFactory::::new() + .for_each(TourPlan::routes()) + .flattened(|route: &Route| &route.visits), + equal_bi(|visit: &Visit| visit.id, |assigned: &usize| *assigned), + )) + .penalize_hard() + .named("All visits assigned"); + + let compact_route = ConstraintFactory::::new() + .for_each(TourPlan::routes()) + .penalize_with(|route: &Route| HardSoftScore::of_soft(route.visits.len() as i64)) + .named("Route length"); + + (all_visits_assigned, compact_route) +} diff --git a/examples/list-tsp/src/domain/visit.rs b/examples/list-tsp/src/domain/visit.rs new file mode 100644 index 00000000..0a537c45 --- /dev/null +++ b/examples/list-tsp/src/domain/visit.rs @@ -0,0 +1,7 @@ +use solverforge::prelude::*; + +#[problem_fact] +pub struct Visit { + #[planning_id] + pub id: usize, +} diff --git a/examples/list-tsp/src/main.rs b/examples/list-tsp/src/main.rs new file mode 100644 index 00000000..d1cbeb56 --- /dev/null +++ b/examples/list-tsp/src/main.rs @@ -0,0 +1,46 @@ +use solverforge::{SolverEvent, SolverManager, SolverTerminalReason}; + +mod domain; + +use domain::{Route, TourPlan, Visit}; + +static MANAGER: SolverManager = SolverManager::new(); + +fn main() { + let visit_values: Vec = (1..=4).collect(); + let plan = TourPlan { + visits: visit_values + .iter() + .copied() + .map(|id| Visit { id }) + .collect(), + visit_values, + routes: vec![Route { + id: 0, + visits: Vec::new(), + }], + score: None, + }; + + let (job_id, mut events) = MANAGER.solve(plan).expect("solver job should start"); + + while let Some(event) = events.blocking_recv() { + match event { + SolverEvent::Completed { metadata, solution } => { + assert_eq!( + 
metadata.terminal_reason, + Some(SolverTerminalReason::Completed) + ); + println!("score: {}", solution.score.expect("completed score")); + for route in solution.routes { + println!("route {} -> {:?}", route.id, route.visits); + } + MANAGER.delete(job_id).expect("delete completed job"); + break; + } + SolverEvent::Failed { error, .. } => panic!("solver failed: {error}"), + SolverEvent::Cancelled { .. } => panic!("solver was cancelled"), + _ => {} + } + } +} diff --git a/examples/mixed-job-shop/Cargo.toml b/examples/mixed-job-shop/Cargo.toml new file mode 100644 index 00000000..cf0aca89 --- /dev/null +++ b/examples/mixed-job-shop/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "mixed-job-shop" +version = "0.0.0" +edition.workspace = true +publish = false + +[dependencies] +solverforge = { workspace = true } diff --git a/examples/mixed-job-shop/solver.toml b/examples/mixed-job-shop/solver.toml new file mode 100644 index 00000000..8988a391 --- /dev/null +++ b/examples/mixed-job-shop/solver.toml @@ -0,0 +1,3 @@ +[[phases]] +type = "construction_heuristic" +construction_heuristic_type = "first_fit" diff --git a/examples/mixed-job-shop/src/domain/job_shop_plan.rs b/examples/mixed-job-shop/src/domain/job_shop_plan.rs new file mode 100644 index 00000000..7e016bdd --- /dev/null +++ b/examples/mixed-job-shop/src/domain/job_shop_plan.rs @@ -0,0 +1,69 @@ +use solverforge::prelude::*; +use solverforge::stream::joiner::equal_bi; +use solverforge::stream::ConstraintFactory; + +use super::{Machine, MachineSequence, Operation}; + +#[planning_solution( + constraints = "define_constraints", + solver_toml = "../../solver.toml" +)] +pub struct JobShopPlan { + #[problem_fact_collection] + pub machines: Vec, + + #[planning_entity_collection] + pub operations: Vec, + + #[planning_list_element_collection(owner = "machine_sequences")] + pub operation_values: Vec, + + #[planning_entity_collection] + pub machine_sequences: Vec, + + #[planning_score] + pub score: Option, +} + +fn 
define_constraints() -> impl ConstraintSet { + let unassigned_machine = ConstraintFactory::::new() + .for_each(JobShopPlan::operations()) + .unassigned() + .penalize_hard() + .named("Unassigned operation machine"); + + let unscheduled_operation = ConstraintFactory::::new() + .for_each(JobShopPlan::operations()) + .if_not_exists(( + ConstraintFactory::::new() + .for_each(JobShopPlan::machine_sequences()) + .flattened(|machine: &MachineSequence| &machine.operations), + equal_bi( + |operation: &Operation| operation.id, + |assigned: &usize| *assigned, + ), + )) + .penalize_hard() + .named("Unscheduled operation"); + + let same_job_same_machine = ConstraintFactory::::new() + .for_each(JobShopPlan::operations()) + .join(( + ConstraintFactory::::new() + .for_each(JobShopPlan::operations()), + |left: &Operation, right: &Operation| { + left.id < right.id + && left.job == right.job + && left.machine_idx.is_some() + && left.machine_idx == right.machine_idx + }, + )) + .penalize_soft() + .named("Same job machine reuse"); + + ( + unassigned_machine, + unscheduled_operation, + same_job_same_machine, + ) +} diff --git a/examples/mixed-job-shop/src/domain/machine.rs b/examples/mixed-job-shop/src/domain/machine.rs new file mode 100644 index 00000000..7faca024 --- /dev/null +++ b/examples/mixed-job-shop/src/domain/machine.rs @@ -0,0 +1,7 @@ +use solverforge::prelude::*; + +#[problem_fact] +pub struct Machine { + #[planning_id] + pub id: usize, +} diff --git a/examples/mixed-job-shop/src/domain/machine_sequence.rs b/examples/mixed-job-shop/src/domain/machine_sequence.rs new file mode 100644 index 00000000..546ddcf2 --- /dev/null +++ b/examples/mixed-job-shop/src/domain/machine_sequence.rs @@ -0,0 +1,10 @@ +use solverforge::prelude::*; + +#[planning_entity] +pub struct MachineSequence { + #[planning_id] + pub id: usize, + + #[planning_list_variable(element_collection = "operation_values")] + pub operations: Vec, +} diff --git a/examples/mixed-job-shop/src/domain/mod.rs 
b/examples/mixed-job-shop/src/domain/mod.rs new file mode 100644 index 00000000..3f6e50df --- /dev/null +++ b/examples/mixed-job-shop/src/domain/mod.rs @@ -0,0 +1,13 @@ +solverforge::planning_model! { + root = "examples/mixed-job-shop/src/domain"; + + mod job_shop_plan; + mod machine; + mod machine_sequence; + mod operation; + + pub use job_shop_plan::JobShopPlan; + pub use machine::Machine; + pub use machine_sequence::MachineSequence; + pub use operation::Operation; +} diff --git a/examples/mixed-job-shop/src/domain/operation.rs b/examples/mixed-job-shop/src/domain/operation.rs new file mode 100644 index 00000000..7be6d189 --- /dev/null +++ b/examples/mixed-job-shop/src/domain/operation.rs @@ -0,0 +1,12 @@ +use solverforge::prelude::*; + +#[planning_entity] +pub struct Operation { + #[planning_id] + pub id: usize, + pub job: usize, + pub step: usize, + + #[planning_variable(value_range_provider = "machines", allows_unassigned = true)] + pub machine_idx: Option, +} diff --git a/examples/mixed-job-shop/src/main.rs b/examples/mixed-job-shop/src/main.rs new file mode 100644 index 00000000..b96d4578 --- /dev/null +++ b/examples/mixed-job-shop/src/main.rs @@ -0,0 +1,60 @@ +use solverforge::{SolverEvent, SolverManager, SolverTerminalReason}; + +mod domain; + +use domain::{JobShopPlan, Machine, MachineSequence, Operation}; + +static MANAGER: SolverManager = SolverManager::new(); + +fn main() { + let operation_values: Vec = (0..6).collect(); + let plan = JobShopPlan { + machines: (0..2).map(|id| Machine { id }).collect(), + operations: operation_values + .iter() + .copied() + .map(|id| Operation { + id, + job: id / 2, + step: id % 2, + machine_idx: None, + }) + .collect(), + operation_values, + machine_sequences: (0..2) + .map(|id| MachineSequence { + id, + operations: Vec::new(), + }) + .collect(), + score: None, + }; + + let (job_id, mut events) = MANAGER.solve(plan).expect("solver job should start"); + + while let Some(event) = events.blocking_recv() { + match event { + 
SolverEvent::Completed { metadata, solution } => { + assert_eq!( + metadata.terminal_reason, + Some(SolverTerminalReason::Completed) + ); + println!("score: {}", solution.score.expect("completed score")); + for operation in &solution.operations { + println!( + "operation {} job {} step {} -> machine {:?}", + operation.id, operation.job, operation.step, operation.machine_idx + ); + } + for sequence in solution.machine_sequences { + println!("machine {} sequence {:?}", sequence.id, sequence.operations); + } + MANAGER.delete(job_id).expect("delete completed job"); + break; + } + SolverEvent::Failed { error, .. } => panic!("solver failed: {error}"), + SolverEvent::Cancelled { .. } => panic!("solver was cancelled"), + _ => {} + } + } +} diff --git a/examples/nqueens/Cargo.toml b/examples/nqueens/Cargo.toml new file mode 100644 index 00000000..e899a748 --- /dev/null +++ b/examples/nqueens/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "nqueens" +version = "0.0.0" +edition.workspace = true +publish = false + +[dependencies] +solverforge = { workspace = true } diff --git a/examples/nqueens/solver.toml b/examples/nqueens/solver.toml new file mode 100644 index 00000000..8988a391 --- /dev/null +++ b/examples/nqueens/solver.toml @@ -0,0 +1,3 @@ +[[phases]] +type = "construction_heuristic" +construction_heuristic_type = "first_fit" diff --git a/examples/nqueens/src/domain/board.rs b/examples/nqueens/src/domain/board.rs new file mode 100644 index 00000000..0f4abd0d --- /dev/null +++ b/examples/nqueens/src/domain/board.rs @@ -0,0 +1,47 @@ +use solverforge::prelude::*; +use solverforge::stream::ConstraintFactory; + +use super::{Queen, Row}; + +#[planning_solution( + constraints = "define_constraints", + solver_toml = "../../solver.toml" +)] +pub struct Board { + #[problem_fact_collection] + pub rows: Vec, + + #[planning_entity_collection] + pub queens: Vec, + + #[planning_score] + pub score: Option, +} + +fn define_constraints() -> impl ConstraintSet { + let unassigned = 
ConstraintFactory::<Board>::new() + .for_each(Board::queens()) + .unassigned() + .penalize_hard() + .named("Unassigned queen"); + + let conflict = ConstraintFactory::<Board>::new() + .for_each(Board::queens()) + .join(( + ConstraintFactory::<Board>::new().for_each(Board::queens()), + |left: &Queen, right: &Queen| { + if left.column >= right.column { + return false; + } + let (Some(left_row), Some(right_row)) = (left.row_idx, right.row_idx) else { + return false; + }; + left_row == right_row + || left_row.abs_diff(right_row) == left.column.abs_diff(right.column) + }, + )) + .penalize_hard() + .named("Queen conflict"); + + (unassigned, conflict) +} diff --git a/examples/nqueens/src/domain/mod.rs b/examples/nqueens/src/domain/mod.rs new file mode 100644 index 00000000..446fbae4 --- /dev/null +++ b/examples/nqueens/src/domain/mod.rs @@ -0,0 +1,11 @@ +solverforge::planning_model! { + root = "examples/nqueens/src/domain"; + + mod board; + mod queen; + mod row; + + pub use board::Board; + pub use queen::Queen; + pub use row::Row; +} diff --git a/examples/nqueens/src/domain/queen.rs b/examples/nqueens/src/domain/queen.rs new file mode 100644 index 00000000..fe5c137a --- /dev/null +++ b/examples/nqueens/src/domain/queen.rs @@ -0,0 +1,11 @@ +use solverforge::prelude::*; + +#[planning_entity] +pub struct Queen { + #[planning_id] + pub id: usize, + pub column: usize, + + #[planning_variable(value_range_provider = "rows", allows_unassigned = true)] + pub row_idx: Option<usize>, +} diff --git a/examples/nqueens/src/domain/row.rs b/examples/nqueens/src/domain/row.rs new file mode 100644 index 00000000..7c89bd4b --- /dev/null +++ b/examples/nqueens/src/domain/row.rs @@ -0,0 +1,7 @@ +use solverforge::prelude::*; + +#[problem_fact] +pub struct Row { + #[planning_id] + pub id: usize, +} diff --git a/examples/nqueens/src/main.rs b/examples/nqueens/src/main.rs new file mode 100644 index 00000000..88072831 --- /dev/null +++ b/examples/nqueens/src/main.rs @@ -0,0 +1,44 @@ +use solverforge::{SolverEvent,
SolverManager, SolverTerminalReason}; + +mod domain; + +use domain::{Board, Queen, Row}; + +static MANAGER: SolverManager<Board> = SolverManager::new(); + +fn main() { + let seed_rows = [0, 4, 7, 5, 2, 6, 1, 3]; + let board = Board { + rows: (0..8).map(|id| Row { id }).collect(), + queens: (0..8) + .map(|column| Queen { + id: column, + column, + row_idx: Some(seed_rows[column]), + }) + .collect(), + score: None, + }; + + let (job_id, mut events) = MANAGER.solve(board).expect("solver job should start"); + + while let Some(event) = events.blocking_recv() { + match event { + SolverEvent::Completed { metadata, solution } => { + assert_eq!( + metadata.terminal_reason, + Some(SolverTerminalReason::Completed) + ); + println!("score: {}", solution.score.expect("completed score")); + for queen in solution.queens { + println!("column {} -> row {:?}", queen.column, queen.row_idx); + } + MANAGER.delete(job_id).expect("delete completed job"); + break; + } + SolverEvent::Failed { error, .. } => panic!("solver failed: {error}"), + SolverEvent::Cancelled { ..
} => panic!("solver was cancelled"), + _ => {} + } + } +} diff --git a/examples/scalar-graph-coloring/Cargo.toml b/examples/scalar-graph-coloring/Cargo.toml new file mode 100644 index 00000000..6b4f0291 --- /dev/null +++ b/examples/scalar-graph-coloring/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "scalar-graph-coloring" +version = "0.0.0" +edition.workspace = true +publish = false + +[dependencies] +solverforge = { workspace = true } diff --git a/examples/scalar-graph-coloring/solver.toml b/examples/scalar-graph-coloring/solver.toml new file mode 100644 index 00000000..8988a391 --- /dev/null +++ b/examples/scalar-graph-coloring/solver.toml @@ -0,0 +1,3 @@ +[[phases]] +type = "construction_heuristic" +construction_heuristic_type = "first_fit" diff --git a/examples/scalar-graph-coloring/src/domain/color.rs b/examples/scalar-graph-coloring/src/domain/color.rs new file mode 100644 index 00000000..308034c8 --- /dev/null +++ b/examples/scalar-graph-coloring/src/domain/color.rs @@ -0,0 +1,8 @@ +use solverforge::prelude::*; + +#[problem_fact] +pub struct Color { + #[planning_id] + pub id: usize, + pub name: String, +} diff --git a/examples/scalar-graph-coloring/src/domain/graph_coloring.rs b/examples/scalar-graph-coloring/src/domain/graph_coloring.rs new file mode 100644 index 00000000..bae0c836 --- /dev/null +++ b/examples/scalar-graph-coloring/src/domain/graph_coloring.rs @@ -0,0 +1,44 @@ +use solverforge::prelude::*; +use solverforge::stream::ConstraintFactory; + +use super::{Color, Node}; + +#[planning_solution( + constraints = "define_constraints", + solver_toml = "../../solver.toml" +)] +pub struct GraphColoring { + #[problem_fact_collection] + pub colors: Vec<Color>, + + #[planning_entity_collection] + pub nodes: Vec<Node>, + + #[planning_score] + pub score: Option<HardSoftScore>, +} + +fn define_constraints() -> impl ConstraintSet { + let unassigned = ConstraintFactory::<GraphColoring>::new() + .for_each(GraphColoring::nodes()) + .unassigned() + .penalize_hard() + .named("Unassigned color"); + + let
adjacent_same_color = ConstraintFactory::<GraphColoring>::new() + .for_each(GraphColoring::nodes()) + .join(( + ConstraintFactory::<GraphColoring>::new() + .for_each(GraphColoring::nodes()), + |left: &Node, right: &Node| { + left.id < right.id + && left.neighbors.contains(&right.id) + && left.color_idx.is_some() + && left.color_idx == right.color_idx + }, + )) + .penalize_hard() + .named("Adjacent color conflict"); + + (unassigned, adjacent_same_color) +} diff --git a/examples/scalar-graph-coloring/src/domain/mod.rs b/examples/scalar-graph-coloring/src/domain/mod.rs new file mode 100644 index 00000000..588ed7f5 --- /dev/null +++ b/examples/scalar-graph-coloring/src/domain/mod.rs @@ -0,0 +1,11 @@ +solverforge::planning_model! { + root = "examples/scalar-graph-coloring/src/domain"; + + mod color; + mod graph_coloring; + mod node; + + pub use color::Color; + pub use graph_coloring::GraphColoring; + pub use node::Node; +} diff --git a/examples/scalar-graph-coloring/src/domain/node.rs b/examples/scalar-graph-coloring/src/domain/node.rs new file mode 100644 index 00000000..a77af644 --- /dev/null +++ b/examples/scalar-graph-coloring/src/domain/node.rs @@ -0,0 +1,11 @@ +use solverforge::prelude::*; + +#[planning_entity] +pub struct Node { + #[planning_id] + pub id: usize, + pub neighbors: Vec<usize>, + + #[planning_variable(value_range_provider = "colors", allows_unassigned = true)] + pub color_idx: Option<usize>, +} diff --git a/examples/scalar-graph-coloring/src/main.rs b/examples/scalar-graph-coloring/src/main.rs new file mode 100644 index 00000000..8f50f263 --- /dev/null +++ b/examples/scalar-graph-coloring/src/main.rs @@ -0,0 +1,81 @@ +use solverforge::{SolverEvent, SolverManager, SolverTerminalReason}; + +mod domain; + +use domain::{Color, GraphColoring, Node}; + +static MANAGER: SolverManager<GraphColoring> = SolverManager::new(); + +fn main() { + let graph = GraphColoring { + colors: vec![ + Color { + id: 0, + name: "red".to_string(), + }, + Color { + id: 1, + name: "blue".to_string(), + }, + Color { + id: 2, + name:
"green".to_string(), + }, + ], + nodes: vec![ + Node { + id: 0, + neighbors: vec![1, 2], + color_idx: None, + }, + Node { + id: 1, + neighbors: vec![0, 2, 3], + color_idx: None, + }, + Node { + id: 2, + neighbors: vec![0, 1, 4], + color_idx: None, + }, + Node { + id: 3, + neighbors: vec![1, 4], + color_idx: None, + }, + Node { + id: 4, + neighbors: vec![2, 3], + color_idx: None, + }, + ], + score: None, + }; + + let (job_id, mut events) = MANAGER.solve(graph).expect("solver job should start"); + + while let Some(event) = events.blocking_recv() { + match event { + SolverEvent::Completed { metadata, solution } => { + assert_eq!( + metadata.terminal_reason, + Some(SolverTerminalReason::Completed) + ); + println!("score: {}", solution.score.expect("completed score")); + for node in solution.nodes { + let color = node + .color_idx + .and_then(|idx| solution.colors.get(idx)) + .map(|color| color.name.as_str()) + .unwrap_or("unassigned"); + println!("node {} -> {color}", node.id); + } + MANAGER.delete(job_id).expect("delete completed job"); + break; + } + SolverEvent::Failed { error, .. } => panic!("solver failed: {error}"), + SolverEvent::Cancelled { .. } => panic!("solver was cancelled"), + _ => {} + } + } +}