Skip to content

Commit

Permalink
Merge branch 'master' into shub/replication-smoketest
Browse files Browse the repository at this point in the history
  • Loading branch information
Shubham8287 committed Feb 14, 2025
2 parents 17d082d + 0328a4f commit 33babcb
Show file tree
Hide file tree
Showing 86 changed files with 1,536 additions and 1,390 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/docker.yml
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Cache Docker layers
uses: actions/cache@v2
uses: actions/cache@v4
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
Expand Down Expand Up @@ -95,7 +95,7 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Cache Docker layers
uses: actions/cache@v2
uses: actions/cache@v4
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
Expand Down
13 changes: 13 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 4 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ members = [
"crates/update",
"crates/vm",
"modules/benchmarks",
"modules/keynote-benchmarks",
"modules/perf-test",
"modules/module-test",
"modules/quickstart-chat",
Expand Down Expand Up @@ -192,6 +193,7 @@ nohash-hasher = "0.2"
once_cell = "1.16"
parking_lot = { version = "0.12.1", features = ["send_guard", "arc_lock"] }
paste = "1.0"
percent-encoding = "2.3"
petgraph = { version = "0.6.5", default-features = false }
pin-project-lite = "0.2.9"
postgres-types = "0.2.5"
Expand Down Expand Up @@ -249,6 +251,8 @@ tokio-util = { version = "0.7.4", features = ["time"] }
toml = "0.8"
toml_edit = "0.22.22"
tower-http = { version = "0.5", features = ["cors"] }
tower-layer = "0.3"
tower-service = "0.3"
tracing = "0.1.37"
tracing-appender = "0.2.2"
tracing-core = "0.1.31"
Expand Down
9 changes: 9 additions & 0 deletions crates/bindings-csharp/BSATN.Runtime.Tests/Tests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -166,5 +166,14 @@ public static void TimestampConversionChecks()
var laterStamp = stamp + newInterval;
Assert.Equal(laterStamp.MicrosecondsSinceUnixEpoch, us + newIntervalUs);
Assert.Equal(laterStamp.TimeDurationSince(stamp), newInterval);

#pragma warning disable CS1718
Assert.True(stamp == stamp);
#pragma warning restore CS1718
Assert.False(stamp == laterStamp);
Assert.True(stamp < laterStamp);
Assert.False(laterStamp < stamp);
Assert.Equal(-1, stamp.CompareTo(laterStamp));
Assert.Equal(+1, laterStamp.CompareTo(stamp));
}
}
19 changes: 18 additions & 1 deletion crates/bindings-csharp/BSATN.Runtime/Builtins.cs
Original file line number Diff line number Diff line change
Expand Up @@ -280,7 +280,9 @@ public AlgebraicType GetAlgebraicType(ITypeRegistrar registrar) =>
/// This type has less precision than DateTimeOffset (units of microseconds rather than units of 100ns).
/// </summary>
[StructLayout(LayoutKind.Sequential)] // we should be able to use it in FFI
public record struct Timestamp(long MicrosecondsSinceUnixEpoch) : IStructuralReadWrite
public record struct Timestamp(long MicrosecondsSinceUnixEpoch)
: IStructuralReadWrite,
IComparable<Timestamp>
{
public static implicit operator DateTimeOffset(Timestamp t) =>
DateTimeOffset.UnixEpoch.AddTicks(t.MicrosecondsSinceUnixEpoch * Util.TicksPerMicrosecond);
Expand Down Expand Up @@ -319,6 +321,21 @@ public readonly TimeDuration TimeDurationSince(Timestamp earlier) =>
public static Timestamp operator +(Timestamp point, TimeDuration interval) =>
new Timestamp(point.MicrosecondsSinceUnixEpoch + interval.Microseconds);

/// <summary>
/// Orders timestamps chronologically by comparing their microsecond
/// offsets from the Unix epoch.
/// </summary>
public int CompareTo(Timestamp that) =>
    MicrosecondsSinceUnixEpoch.CompareTo(that.MicrosecondsSinceUnixEpoch);

/// <summary>Returns true when <paramref name="l"/> is strictly earlier than <paramref name="r"/>.</summary>
public static bool operator <(Timestamp l, Timestamp r)
{
    // The IComparable<T> contract only guarantees the *sign* of the result
    // (negative / zero / positive), not exactly -1/0/+1, so test the sign
    // rather than comparing against a literal -1.
    return l.CompareTo(r) < 0;
}

/// <summary>Returns true when <paramref name="l"/> is strictly later than <paramref name="r"/>.</summary>
public static bool operator >(Timestamp l, Timestamp r)
{
    // The IComparable<T> contract only guarantees the *sign* of the result
    // (negative / zero / positive), not exactly -1/0/+1, so test the sign
    // rather than comparing against a literal +1.
    return l.CompareTo(r) > 0;
}

// --- auto-generated ---

public void ReadFields(BinaryReader reader)
Expand Down
15 changes: 15 additions & 0 deletions crates/bindings-macro/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,11 +45,25 @@ mod sym {
symbol!(primary_key);
symbol!(private);
symbol!(public);
symbol!(repr);
symbol!(sats);
symbol!(scheduled);
symbol!(unique);
symbol!(update);

symbol!(u8);
symbol!(i8);
symbol!(u16);
symbol!(i16);
symbol!(u32);
symbol!(i32);
symbol!(u64);
symbol!(i64);
symbol!(u128);
symbol!(i128);
symbol!(f32);
symbol!(f64);

impl PartialEq<Symbol> for syn::Ident {
fn eq(&self, sym: &Symbol) -> bool {
self == sym.0
Expand Down Expand Up @@ -350,6 +364,7 @@ pub fn schema_type(input: StdTokenStream) -> StdTokenStream {
sats_derive(input, true, |ty| {
let ident = ty.ident;
let name = &ty.name;

let krate = &ty.krate;
TokenStream::from_iter([
sats::derive_satstype(ty),
Expand Down
121 changes: 121 additions & 0 deletions crates/bindings-macro/src/sats.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@ pub(crate) struct SatsType<'a> {
#[allow(unused)]
pub original_attrs: &'a [syn::Attribute],
pub data: SatsTypeData<'a>,
/// Was the type marked as `#[repr(C)]`?
pub is_repr_c: bool,
}

pub(crate) enum SatsTypeData<'a> {
Expand Down Expand Up @@ -78,6 +80,17 @@ pub(crate) fn sats_type_from_derive(
extract_sats_type(&input.ident, &input.generics, &input.attrs, data, crate_fallback)
}

/// Returns whether any `#[repr(...)]` attribute in `attrs` mentions `C`,
/// i.e., whether the type was declared `#[repr(C)]`.
fn is_repr_c(attrs: &[syn::Attribute]) -> bool {
    attrs
        .iter()
        .filter(|attr| attr.path() == sym::repr)
        .any(|attr| {
            let mut found_c = false;
            // Parse errors are deliberately ignored: a malformed `repr`
            // simply doesn't count as `#[repr(C)]`.
            let _ = attr.parse_nested_meta(|meta| {
                if meta.path.is_ident("C") {
                    found_c = true;
                }
                Ok(())
            });
            found_c
        })
}

pub(crate) fn extract_sats_type<'a>(
ident: &'a syn::Ident,
generics: &'a syn::Generics,
Expand Down Expand Up @@ -112,13 +125,16 @@ pub(crate) fn extract_sats_type<'a>(
let krate = krate.unwrap_or(crate_fallback);
let name = name.unwrap_or_else(|| crate::util::ident_to_litstr(ident));

let is_repr_c = is_repr_c(attrs);

Ok(SatsType {
ident,
generics,
name,
krate,
original_attrs: attrs,
data,
is_repr_c,
})
}

Expand Down Expand Up @@ -220,6 +236,48 @@ fn add_type_bounds(generics: &mut syn::Generics, trait_bound: &TokenStream) {
}
}

/// Returns the list of types if syntactically we see that the `ty`
/// is `#[repr(C)]` of only primitives.
///
/// We later assert semantically in generated code that the list of types
/// actually are primitives.
/// We'll also check that `ty` is paddingless.
fn extract_repr_c_primitive<'a>(ty: &'a SatsType) -> Option<Vec<&'a syn::Ident>> {
    // The fast path only applies to `#[repr(C)]` product (struct) types.
    if !ty.is_repr_c {
        return None;
    }
    let SatsTypeData::Product(fields) = &ty.data else {
        return None;
    };

    // The primitive types eligible for the fast path.
    const PRIM_TY: &[sym::Symbol] = &[
        sym::u8,
        sym::i8,
        sym::u16,
        sym::i16,
        sym::u32,
        sym::i32,
        sym::u64,
        sym::i64,
        sym::u128,
        sym::i128,
        sym::f32,
        sym::f64,
    ];

    // Every field must be a bare path naming one of the primitives above;
    // the fallible `collect` bails with `None` on the first field that isn't.
    fields
        .iter()
        .map(|field| match &field.ty {
            syn::Type::Path(path) => path
                .path
                .get_ident()
                .filter(|ident| PRIM_TY.iter().any(|p| ident == p)),
            _ => None,
        })
        .collect()
}

pub(crate) fn derive_deserialize(ty: &SatsType<'_>) -> TokenStream {
let (name, tuple_name) = (&ty.ident, &ty.name);
let spacetimedb_lib = &ty.krate;
Expand Down Expand Up @@ -249,6 +307,33 @@ pub(crate) fn derive_deserialize(ty: &SatsType<'_>) -> TokenStream {

match &ty.data {
SatsTypeData::Product(fields) => {
let mut fast_body = None;
if let Some(fields) = extract_repr_c_primitive(ty) {
fast_body = Some(quote! {
#[inline(always)]
fn deserialize_from_bsatn<R: #spacetimedb_lib::buffer::BufReader<'de>>(
mut deserializer: #spacetimedb_lib::bsatn::Deserializer<'de, R>
) -> Result<Self, #spacetimedb_lib::bsatn::DecodeError> {
const _: () = {
#(#spacetimedb_lib::bsatn::assert_is_primitive_type::<#fields>();)*
};
// This guarantees that `Self` has no padding.
if const { core::mem::size_of::<Self>() == #(core::mem::size_of::<#fields>())+* } {
let bytes = deserializer.get_slice(core::mem::size_of::<Self>())?;
let ptr = bytes as *const [u8] as *const u8 as *const Self;
// SAFETY:
// - `ptr` is valid for reads, `size_of::<T>()`.
// - `ptr` is trivially properly aligned (alignment = 1).
// - `ptr` points to a properly initialized `Foo`
// as we've guaranteed that there is no padding.
Ok(unsafe { core::ptr::read(ptr) })
} else {
Self::deserialize(deserializer)
}
}
});
}

let n_fields = fields.len();

let field_names = fields.iter().map(|f| f.ident.unwrap()).collect::<Vec<_>>();
Expand All @@ -260,6 +345,8 @@ pub(crate) fn derive_deserialize(ty: &SatsType<'_>) -> TokenStream {
#[allow(clippy::all)]
const _: () = {
impl #de_impl_generics #spacetimedb_lib::de::Deserialize<'de> for #name #ty_generics #de_where_clause {
#fast_body

fn deserialize<D: #spacetimedb_lib::de::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
deserializer.deserialize_product(__ProductVisitor {
_marker: std::marker::PhantomData::<fn() -> #name #ty_generics>,
Expand Down Expand Up @@ -422,8 +509,41 @@ pub(crate) fn derive_serialize(ty: &SatsType) -> TokenStream {

let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();

let mut fast_body = None;
let body = match &ty.data {
SatsTypeData::Product(fields) => {
if let Some(fields) = extract_repr_c_primitive(ty) {
fast_body = Some(quote! {
#[inline(always)]
fn serialize_into_bsatn<W: #spacetimedb_lib::buffer::BufWriter>(
&self,
serializer: #spacetimedb_lib::bsatn::Serializer<'_, W>
) -> Result<(), #spacetimedb_lib::bsatn::EncodeError> {
const _: () = {
#(#spacetimedb_lib::bsatn::assert_is_primitive_type::<#fields>();)*
};
// This guarantees that `Self` has no padding.
if const { core::mem::size_of::<Self>() == #(core::mem::size_of::<#fields>())+* } {
// SAFETY:
// - We know `self` is non-null as it's a shared reference
// and we know it's valid for reads for `core::mem::size_of::<Self>()` bytes.
// Alignment of `u8` is 1, so it's trivially satisfied.
// - The slice is all within `self`, so in the same allocated object.
// - `self` does point to `core::mem::size_of::<Self>()` consecutive `u8`s,
// as per `assert_is_primitive_type` above,
// we know none of the fields of `Self` have any padding.
// - We're not going to mutate the memory within `bytes`.
// - We know `core::mem::size_of::<Self>() < isize::MAX`.
let bytes = unsafe { core::slice::from_raw_parts(self as *const _ as *const u8, core::mem::size_of::<Self>()) };
serializer.raw_write_bytes(bytes);
Ok(())
} else {
self.serialize(serializer)
}
}
});
}

let fieldnames = fields.iter().map(|field| field.ident.unwrap());
let tys = fields.iter().map(|f| &f.ty);
let fieldnamestrings = fields.iter().map(|field| field.name.as_ref().unwrap());
Expand Down Expand Up @@ -456,6 +576,7 @@ pub(crate) fn derive_serialize(ty: &SatsType) -> TokenStream {
};
quote! {
impl #impl_generics #spacetimedb_lib::ser::Serialize for #name #ty_generics #where_clause {
#fast_body
fn serialize<S: #spacetimedb_lib::ser::Serializer>(&self, __serializer: S) -> Result<S::Ok, S::Error> {
#body
}
Expand Down
7 changes: 3 additions & 4 deletions crates/bindings/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@ pub mod table;

use spacetimedb_lib::bsatn;
use std::cell::RefCell;
use std::collections::VecDeque;

pub use log;
#[cfg(feature = "rand")]
Expand Down Expand Up @@ -138,7 +137,7 @@ pub fn table_id_from_name(table_name: &str) -> TableId {
thread_local! {
/// A global pool of buffers used for iteration.
// This gets optimized away to a normal global since wasm32 doesn't have threads by default.
static ITER_BUFS: RefCell<VecDeque<Vec<u8>>> = const { RefCell::new(VecDeque::new()) };
static ITER_BUFS: RefCell<Vec<Vec<u8>>> = const { RefCell::new(Vec::new()) };
}

struct IterBuf {
Expand All @@ -149,7 +148,7 @@ impl IterBuf {
/// Take a buffer from the pool of buffers for row iterators, if one exists. Otherwise, allocate a new one.
fn take() -> Self {
    // Prefer recycling a pooled buffer; fall back to a fresh allocation.
    let buf = match ITER_BUFS.with_borrow_mut(Vec::pop) {
        Some(buf) => buf,
        None => Vec::with_capacity(DEFAULT_BUFFER_CAPACITY),
    };
    Self { buf }
}
Expand All @@ -170,7 +169,7 @@ impl Drop for IterBuf {
fn drop(&mut self) {
self.buf.clear();
let buf = std::mem::take(&mut self.buf);
ITER_BUFS.with_borrow_mut(|v| v.push_back(buf));
ITER_BUFS.with_borrow_mut(|v| v.push(buf));
}
}

Expand Down
1 change: 1 addition & 0 deletions crates/cli/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ itertools.workspace = true
indicatif.workspace = true
jsonwebtoken.workspace = true
mimalloc.workspace = true
percent-encoding.workspace = true
regex.workspace = true
reqwest.workspace = true
rustyline.workspace = true
Expand Down
Loading

0 comments on commit 33babcb

Please sign in to comment.