//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements several kinds of arena.
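//!
//! A minimal usage sketch (illustrative only; it assumes this crate is
//! available under the name `arena`):
//!
//! ```ignore
//! use arena::TypedArena;
//!
//! let arena = TypedArena::default();
//! // Each allocation is just a pointer bump; the value lives as long as
//! // the arena does.
//! let x: &mut u32 = arena.alloc(41);
//! *x += 1;
//! assert_eq!(*x, 42);
//! ```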

#![doc(
    html_root_url = "https://doc.rust-lang.org/nightly/",
    test(no_crate_inject, attr(deny(warnings)))
)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(raw_vec_internals)]
#![cfg_attr(test, feature(test))]
#![allow(deprecated)]

extern crate alloc;

use rustc_data_structures::cold_path;
use smallvec::SmallVec;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;

use alloc::raw_vec::RawVec;

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,
    /// The number of valid entries in the chunk.
    entries: usize,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: RawVec::with_capacity(capacity), entries: 0 }
    }

    /// Destroys this arena chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
            let mut start = self.start();
            // Destroy all allocated objects.
            for _ in 0..len {
                ptr::drop_in_place(start);
                start = start.offset(1);
            }
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *mut T {
        self.storage.ptr()
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().add(self.storage.capacity())
            }
        }
    }
}

// The arenas start with PAGE-sized chunks, and then each new chunk is twice as
// big as its predecessor, up until we reach HUGE_PAGE-sized chunks, whereupon
// we stop growing. This scales well, from arenas that are barely used up to
// arenas that are used for 100s of MiBs. Note also that the chosen sizes match
// the usual sizes of pages and huge pages on Linux.
const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;
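// As a concrete illustration (assuming 1-byte elements), successive chunk
// capacities are 4096, 8192, 16384, ..., 1048576, 2097152, and every chunk
// after that stays at 2 MiB, unless a single request needs more space.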

impl<T> Default for TypedArena<T> {
    /// Creates a new `TypedArena`.
    fn default() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to 0 so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: RefCell::new(vec![]),
            _own: PhantomData,
        }
    }
}

impl<T> TypedArena<T> {
    /// Allocates an object in the `TypedArena`, returning a reference to it.
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    #[inline]
    fn can_allocate(&self, additional: usize) -> bool {
        let available_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
        available_bytes >= additional_bytes
    }

    /// Ensures there's enough space in the current chunk to fit `additional` objects.
    #[inline]
    fn ensure_capacity(&self, additional: usize) {
        if !self.can_allocate(additional) {
            self.grow(additional);
            debug_assert!(self.can_allocate(additional));
        }
    }

    #[inline]
    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
        assert!(mem::size_of::<T>() != 0);
        assert!(len != 0);

        self.ensure_capacity(len);

        let start_ptr = self.ptr.get();
        self.ptr.set(start_ptr.add(len));
        start_ptr
    }

    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
    #[inline]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        unsafe {
            let len = slice.len();
            let start_ptr = self.alloc_raw_slice(len);
            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }

    #[inline]
    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        assert!(mem::size_of::<T>() != 0);
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec
        unsafe {
            let len = vec.len();
            let start_ptr = self.alloc_raw_slice(len);
            vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            vec.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }

    /// Grows the arena.
    #[inline(never)]
    #[cold]
    fn grow(&self, additional: usize) {
        unsafe {
            // We need the element size to convert chunk sizes (ranging from
            // PAGE to HUGE_PAGE bytes) to element counts.
            let elem_size = cmp::max(1, mem::size_of::<T>());
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                last_chunk.entries = used_bytes / mem::size_of::<T>();

                // If the previous chunk's capacity is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.capacity();
                if new_cap < HUGE_PAGE / elem_size {
                    new_cap = new_cap.checked_mul(2).unwrap();
                }
            } else {
                new_cap = PAGE / elem_size;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let chunk = TypedArenaChunk::<T>::new(new_cap);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Clears the arena. Deallocates all but the longest chunk which may be reused.
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.last_mut() {
                self.clear_last_chunk(&mut last_chunk);
                let len = chunks_borrow.len();
                // If `T` is a ZST, the code below has no effect.
                for mut chunk in chunks_borrow.drain(..len - 1) {
                    chunk.destroy(chunk.entries);
                }
            }
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    chunk.destroy(chunk.entries);
                }
            }
            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}

pub struct DroplessArena {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut u8>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

unsafe impl Send for DroplessArena {}

impl Default for DroplessArena {
    #[inline]
    fn default() -> DroplessArena {
        DroplessArena {
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
        }
    }
}

impl DroplessArena {
    #[inline]
    fn align(&self, align: usize) {
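        // Round the bump pointer up to the next multiple of `align` (callers
        // pass `mem::align_of`, so this is a power of two). Illustrative
        // example: with ptr = 0x1003 and align = 8, (0x1003 + 7) & !7 == 0x1008.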
        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
        self.ptr.set(final_address as *mut u8);
        assert!(self.ptr <= self.end);
    }

    #[inline(never)]
    #[cold]
    fn grow(&self, additional: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                // There is no need to update `last_chunk.entries` because that
                // field isn't used by `DroplessArena`.

                // If the previous chunk's capacity is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.capacity();
                if new_cap < HUGE_PAGE {
                    new_cap = new_cap.checked_mul(2).unwrap();
                }
            } else {
                new_cap = PAGE;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let chunk = TypedArenaChunk::<u8>::new(new_cap);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    #[inline]
    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
        unsafe {
            assert!(bytes != 0);

            self.align(align);

            let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
            if (future_end as *mut u8) > self.end.get() {
                self.grow(bytes);
            }

            let ptr = self.ptr.get();
            // Set the pointer past ourselves
            self.ptr.set(intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8);
            slice::from_raw_parts_mut(ptr, bytes)
        }
    }

    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        assert!(!mem::needs_drop::<T>());

        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;

        unsafe {
            // Write into uninitialized memory.
            ptr::write(mem, object);
            &mut *mem
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut _
            as *mut T;

        unsafe {
            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }

    #[inline]
    unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
        &self,
        mut iter: I,
        len: usize,
        mem: *mut T,
    ) -> &mut [T] {
        let mut i = 0;
        // Use a manual loop since LLVM manages to optimize it better for
        // slice iterators
        loop {
            let value = iter.next();
            if i >= len || value.is_none() {
                // We only return as many items as the iterator gave us, even
                // though it was supposed to give us `len`
                return slice::from_raw_parts_mut(mem, i);
            }
            ptr::write(mem.add(i), value.unwrap());
            i += 1;
        }
    }

    #[inline]
    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let iter = iter.into_iter();
        assert!(mem::size_of::<T>() != 0);
        assert!(!mem::needs_drop::<T>());

        let size_hint = iter.size_hint();

        match size_hint {
            (min, Some(max)) if min == max => {
                // We know the exact number of elements the iterator will produce here
                let len = min;

                if len == 0 {
                    return &mut [];
                }
                let size = len.checked_mul(mem::size_of::<T>()).unwrap();
                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
                unsafe { self.write_from_iter(iter, len, mem) }
            }
            (_, _) => {
                cold_path(move || -> &mut [T] {
                    let mut vec: SmallVec<[_; 8]> = iter.collect();
                    if vec.is_empty() {
                        return &mut [];
                    }
                    // Move the content to the arena by copying it and then forgetting
                    // the content of the SmallVec
                    unsafe {
                        let len = vec.len();
                        let start_ptr = self
                            .alloc_raw(len * mem::size_of::<T>(), mem::align_of::<T>())
                            as *mut _ as *mut T;
                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                        vec.set_len(0);
                        slice::from_raw_parts_mut(start_ptr, len)
                    }
                })
            }
        }
    }
}

/// Calls the destructor for an object when dropped.
struct DropType {
    drop_fn: unsafe fn(*mut u8),
    obj: *mut u8,
}

unsafe fn drop_for_type<T>(to_drop: *mut u8) {
    std::ptr::drop_in_place(to_drop as *mut T)
}

impl Drop for DropType {
    fn drop(&mut self) {
        unsafe { (self.drop_fn)(self.obj) }
    }
}

/// An arena which can be used to allocate any type.
/// Allocating in this arena is unsafe since the type system
/// doesn't know which types it contains. In order to
/// allocate safely, you must store a PhantomData<T>
/// alongside this arena for each type T you allocate.
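///
/// A rough sketch of that pattern (hypothetical wrapper, not part of this
/// crate):
///
/// ```ignore
/// use std::marker::PhantomData;
///
/// struct TypedDropArena<T> {
///     arena: DropArena,
///     // Tells the type system that dropping the wrapper may drop `T`s.
///     _own: PhantomData<T>,
/// }
///
/// impl<T> TypedDropArena<T> {
///     fn alloc(&self, value: T) -> &mut T {
///         // Only `T`s are ever allocated through this wrapper.
///         unsafe { self.arena.alloc(value) }
///     }
/// }
/// ```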
#[derive(Default)]
pub struct DropArena {
    /// A list of destructors to run when the arena drops.
    /// Ordered so `destructors` gets dropped before the arena
    /// since its destructor can reference memory in the arena.
    destructors: RefCell<Vec<DropType>>,
    arena: DroplessArena,
}

impl DropArena {
    #[inline]
    pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
        let mem =
            self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
        // Write into uninitialized memory.
        ptr::write(mem, object);
        let result = &mut *mem;
        // Record the destructor after doing the allocation as that may panic
        // and would cause `object`'s destructor to run twice if it was recorded before
        self.destructors
            .borrow_mut()
            .push(DropType { drop_fn: drop_for_type::<T>, obj: result as *mut T as *mut u8 });
        result
    }

    #[inline]
    pub unsafe fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        let len = vec.len();

        let start_ptr = self
            .arena
            .alloc_raw(len.checked_mul(mem::size_of::<T>()).unwrap(), mem::align_of::<T>())
            as *mut _ as *mut T;

        let mut destructors = self.destructors.borrow_mut();
        // Reserve space for the destructors so we can't panic while adding them
        destructors.reserve(len);

        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec
        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
        mem::forget(vec.drain(..));

        // Record the destructors after doing the allocation as that may panic
        // and would cause the elements' destructors to run twice if they were recorded before
        for i in 0..len {
            destructors.push(DropType {
                drop_fn: drop_for_type::<T>,
                obj: start_ptr.offset(i as isize) as *mut u8,
            });
        }

        slice::from_raw_parts_mut(start_ptr, len)
    }
}

#[macro_export]
macro_rules! arena_for_type {
    ([][$ty:ty]) => {
        $crate::TypedArena<$ty>
    };
    ([few $(, $attrs:ident)*][$ty:ty]) => {
        ::std::marker::PhantomData<$ty>
    };
    ([$ignore:ident $(, $attrs:ident)*]$args:tt) => {
        $crate::arena_for_type!([$($attrs),*]$args)
    };
}

#[macro_export]
macro_rules! which_arena_for_type {
    ([][$arena:expr]) => {
        ::std::option::Option::Some($arena)
    };
    ([few$(, $attrs:ident)*][$arena:expr]) => {
        ::std::option::Option::None
    };
    ([$ignore:ident$(, $attrs:ident)*]$args:tt) => {
        $crate::which_arena_for_type!([$($attrs),*]$args)
    };
}

#[macro_export]
macro_rules! declare_arena {
    ([], [$($a:tt $name:ident: $ty:ty,)*], $tcx:lifetime) => {
        #[derive(Default)]
        pub struct Arena<$tcx> {
            pub dropless: $crate::DroplessArena,
            drop: $crate::DropArena,
            $($name: $crate::arena_for_type!($a[$ty]),)*
        }

        #[marker]
        pub trait ArenaAllocatable {}

        impl<T: Copy> ArenaAllocatable for T {}

        unsafe trait ArenaField<'tcx>: Sized {
            /// Returns a specific arena to allocate from.
            /// If `None` is returned, the `DropArena` will be used.
            fn arena<'a>(arena: &'a Arena<'tcx>) -> Option<&'a $crate::TypedArena<Self>>;
        }

        unsafe impl<'tcx, T> ArenaField<'tcx> for T {
            #[inline]
            default fn arena<'a>(_: &'a Arena<'tcx>) -> Option<&'a $crate::TypedArena<Self>> {
                panic!()
            }
        }

        $(
            #[allow(unused_lifetimes)]
            impl<$tcx> ArenaAllocatable for $ty {}
            unsafe impl<$tcx> ArenaField<$tcx> for $ty {
                #[inline]
                fn arena<'a>(_arena: &'a Arena<$tcx>) -> Option<&'a $crate::TypedArena<Self>> {
                    $crate::which_arena_for_type!($a[&_arena.$name])
                }
            }
        )*

        impl<'tcx> Arena<'tcx> {
            #[inline]
            pub fn alloc<T: ArenaAllocatable>(&self, value: T) -> &mut T {
                if !::std::mem::needs_drop::<T>() {
                    return self.dropless.alloc(value);
                }
                match <T as ArenaField<'tcx>>::arena(self) {
                    ::std::option::Option::Some(arena) => arena.alloc(value),
                    ::std::option::Option::None => unsafe { self.drop.alloc(value) },
                }
            }

            #[inline]
            pub fn alloc_slice<T: ::std::marker::Copy>(&self, value: &[T]) -> &mut [T] {
                if value.is_empty() {
                    return &mut [];
                }
                self.dropless.alloc_slice(value)
            }

            pub fn alloc_from_iter<'a, T: ArenaAllocatable>(
                &'a self,
                iter: impl ::std::iter::IntoIterator<Item = T>,
            ) -> &'a mut [T] {
                if !::std::mem::needs_drop::<T>() {
                    return self.dropless.alloc_from_iter(iter);
                }
                match <T as ArenaField<'tcx>>::arena(self) {
                    ::std::option::Option::Some(arena) => arena.alloc_from_iter(iter),
                    ::std::option::Option::None => unsafe { self.drop.alloc_from_iter(iter) },
                }
            }
        }
    }
}
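
// Illustrative invocation of `declare_arena!` (hypothetical names and types,
// not taken from the real compiler):
//
//     declare_arena!([], [
//         [] items: MyItem<'tcx>,
//         [few] rare: MyRareData,
//     ], 'tcx);
//
// `[]`-annotated types get a dedicated `TypedArena` field, while `few`-annotated
// types get only a `PhantomData` field and fall back to the `DropArena` (or the
// `DroplessArena` when the type does not need `Drop`).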

#[cfg(test)]
mod tests;