Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/bumpalloc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ impl LeakyBumpAlloc {
std::process::abort();
}

self.ptr = new_ptr as *mut u8;
self.ptr = self.ptr.sub(ptr - new_ptr);
self.ptr
}

Expand Down
51 changes: 35 additions & 16 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,7 @@ use parking_lot::Mutex;
use std::borrow::Cow;
use std::ffi::OsStr;
use std::fmt;

use std::mem::size_of;
use std::path::Path;
use std::rc::Rc;
Expand All @@ -150,9 +151,9 @@ use std::sync::Arc;

mod stringcache;
pub use stringcache::*;
#[cfg(feature="serde")]
#[cfg(feature = "serde")]
pub mod serialization;
#[cfg(feature="serde")]
#[cfg(feature = "serde")]
pub use serialization::DeserializedCache;

mod bumpalloc;
Expand Down Expand Up @@ -188,7 +189,7 @@ impl Ord for Ustr {
/// e.g. a BTreeMap
impl PartialOrd for Ustr {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.as_str().partial_cmp(other.as_str())
Some(self.cmp(other))
}
}

Expand Down Expand Up @@ -302,10 +303,7 @@ impl Ustr {
fn as_string_cache_entry(&self) -> &StringCacheEntry {
// The allocator guarantees that the alignment is correct and that
// this pointer is non-null
unsafe {
&*((self.char_ptr.as_ptr() as usize - size_of::<StringCacheEntry>())
as *const StringCacheEntry)
}
unsafe { &*(self.char_ptr.as_ptr().cast::<StringCacheEntry>().sub(1)) }
}

/// Get the length (in bytes) of this string.
Expand Down Expand Up @@ -589,7 +587,6 @@ impl fmt::Debug for Ustr {

// Just feed the precomputed hash into the Hasher. Note that this will of course
// be terrible unless the Hasher in question is expecting a precomputed hash.
#[allow(clippy::derived_hash_with_manual_eq)]
impl Hash for Ustr {
fn hash<H: Hasher>(&self, state: &mut H) {
self.precomputed_hash().hash(state);
Expand Down Expand Up @@ -741,16 +738,30 @@ pub fn string_cache_iter() -> StringCacheIterator {
// points to the beginning of the allocated region. The first bytes will
// be uninitialized since we're bumping down
for a in &sc.old_allocs {
allocs.push((a.ptr(), a.end()));
// `LeakyBumpAlloc` in `old_allocs` may be "empty".
if a.ptr() < a.end() {
unsafe {
allocs.push(std::slice::from_raw_parts(
a.ptr(),
a.allocated(),
));
}
}
}
let ptr = sc.alloc.ptr();
let end = sc.alloc.end();
if ptr != end {
allocs.push((sc.alloc.ptr(), sc.alloc.end()));
if sc.alloc.ptr() < sc.alloc.end() {
unsafe {
allocs.push(std::slice::from_raw_parts(
sc.alloc.ptr(),
sc.alloc.allocated(),
));
}
}
}

let current_ptr = allocs[0].0;
let current_ptr = allocs
.first()
.map(|s| s.as_ptr())
.unwrap_or_else(std::ptr::null);
StringCacheIterator {
allocs,
current_alloc: 0,
Expand Down Expand Up @@ -938,7 +949,7 @@ mod tests {
// );
// }

#[cfg(all(feature="serde", not(miri)))]
#[cfg(all(feature = "serde", not(miri)))]
#[test]
fn serialization() {
let _t = TEST_LOCK.lock();
Expand Down Expand Up @@ -991,7 +1002,7 @@ mod tests {
assert_eq!(diff.len(), 0);
}

#[cfg(all(feature="serde", not(miri)))]
#[cfg(feature = "serde")]
#[test]
fn serialization_ustr() {
use super::{ustr, Ustr};
Expand Down Expand Up @@ -1051,6 +1062,14 @@ mod tests {
assert_eq!(Some(s1), s2);
}

#[test]
fn test_emtpy_cache() {
unsafe { super::_clear_cache() };
assert_eq!(
super::string_cache_iter().collect::<Vec<_>>(),
Vec::<&'static str>::new()
);

#[test]
fn as_refs() {
let _t = TEST_LOCK.lock();
Expand Down
24 changes: 12 additions & 12 deletions src/stringcache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -249,7 +249,7 @@ impl StringCache {
pub(crate) unsafe fn grow(&mut self) {
let new_mask = self.mask * 2 + 1;
let mut new_entries =
vec![std::ptr::null_mut() as *mut StringCacheEntry; new_mask + 1];
vec![std::ptr::null_mut::<StringCacheEntry>(); new_mask + 1];
// copy the existing map into the new map
let mut to_copy = self.num_entries;
for e in self.entries.iter_mut() {
Expand Down Expand Up @@ -329,7 +329,7 @@ unsafe impl Send for StringCache {}

#[doc(hidden)]
pub struct StringCacheIterator {
pub(crate) allocs: Vec<(*const u8, *const u8)>,
pub(crate) allocs: Vec<&'static [u8]>,
pub(crate) current_alloc: usize,
pub(crate) current_ptr: *const u8,
}
Expand All @@ -342,17 +342,21 @@ fn round_up_to(n: usize, align: usize) -> usize {
impl Iterator for StringCacheIterator {
type Item = &'static str;
fn next(&mut self) -> Option<Self::Item> {
let (_, end) = self.allocs[self.current_alloc];
if self.current_ptr >= end {
if self.allocs.is_empty() {
return None;
}

let mut s = self.allocs[self.current_alloc];
if self.current_ptr >= unsafe { s.as_ptr().add(s.len()) } {
// we've reached the end of the current alloc
if self.current_alloc == self.allocs.len() - 1 {
// we've reached the end
return None;
} else {
// advance to the next alloc
self.current_alloc += 1;
let (current_ptr, _) = self.allocs[self.current_alloc];
self.current_ptr = current_ptr;
s = self.allocs[self.current_alloc];
self.current_ptr = s.as_ptr();
}
}

Expand All @@ -365,11 +369,8 @@ impl Iterator for StringCacheIterator {
self.current_ptr = sce.next_entry();

// we know we're safe not to check here since we put valid UTF-8 in
let s = std::str::from_utf8_unchecked(std::slice::from_raw_parts(
sce.char_ptr(),
sce.len,
));
Some(s)
let pos = sce.char_ptr() as usize - s.as_ptr() as usize;
Some(std::str::from_utf8_unchecked(&s[pos..(pos + sce.len)]))
}
}
}
Expand All @@ -392,7 +393,6 @@ impl StringCacheEntry {
// Calculate the address of the next entry in the cache. This is a utility
// function to hide the pointer arithmetic in iterators
pub(crate) unsafe fn next_entry(&self) -> *const u8 {
#[allow(clippy::ptr_offset_with_cast)]
self.char_ptr().add(round_up_to(
self.len + 1,
std::mem::align_of::<StringCacheEntry>(),
Expand Down