// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.

use std::ops::Deref;
use std::ops::DerefMut;
use std::ops::Range;
use std::rc::Rc;

use super::rawbytes;
use super::transl8::FromV8;

/// A V8Slice encapsulates a slice that's been borrowed from a JavaScript
/// ArrayBuffer object. JavaScript objects can normally be garbage collected,
/// but the existence of a V8Slice inhibits this until it is dropped. It
/// behaves much like an Arc<[u8]>.
///
/// # Cloning
/// Cloning a V8Slice does not clone the contents of the buffer,
/// it creates a new reference to that buffer.
///
/// To actually clone the contents of the buffer do
/// `let copy = Vec::from(&*zero_copy_buf);`
#[derive(Clone)]
pub struct V8Slice {
  pub(crate) store: v8::SharedRef<v8::BackingStore>,
  pub(crate) range: Range<usize>,
}

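// Illustrative sketch (not part of this module): cloning a V8Slice only adds
// a reference to the shared BackingStore, while `Vec::from` copies the bytes
// out, as the doc comment above describes:
//
//   let alias: V8Slice = zero_copy_buf.clone();     // same underlying buffer
//   let copy: Vec<u8> = Vec::from(&*zero_copy_buf); // independent deep copy
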
// SAFETY: unsafe trait must have unsafe implementation
unsafe impl Send for V8Slice {}

impl V8Slice {
  pub fn from_buffer(
    buffer: v8::Local<v8::ArrayBuffer>,
    range: Range<usize>,
  ) -> Result<Self, v8::DataError> {
    let store = buffer.get_backing_store();
    if store.is_shared() {
      return Err(v8::DataError::BadType {
        actual: "shared ArrayBufferView",
        expected: "non-shared ArrayBufferView",
      });
    }
    Ok(Self { store, range })
  }

  fn as_slice(&self) -> &[u8] {
    // SAFETY: v8::SharedRef<v8::BackingStore> is similar to Arc<[u8]>:
    // it points to a fixed, contiguous slice of bytes on the heap.
    // We assume it's initialized and thus safe to read (though it may not
    // contain meaningful data).
    unsafe { &*(&self.store[self.range.clone()] as *const _ as *const [u8]) }
  }

  fn as_slice_mut(&mut self) -> &mut [u8] {
    #[allow(clippy::cast_ref_to_mut)]
    // SAFETY: v8::SharedRef<v8::BackingStore> is similar to Arc<[u8]>:
    // it points to a fixed, contiguous slice of bytes on the heap.
    // It's safe-ish to mutate concurrently because it cannot be
    // shrunk/grown/moved/reallocated, thus avoiding dangling refs (unlike a
    // Vec). Concurrent writes can't lead to meaningful structural
    // invalidation since we treat these as opaque buffers / "bags of bytes";
    // concurrent mutation is simply an accepted fact of life.
    // In practice, V8Slices also do not have overlapping read/write phases.
    // TLDR: permissive interior mutability on slices of bytes is "fine".
    unsafe { &mut *(&self.store[self.range.clone()] as *const _ as *mut [u8]) }
  }
}

pub(crate) fn to_ranged_buffer<'s>(
  scope: &mut v8::HandleScope<'s>,
  value: v8::Local<v8::Value>,
) -> Result<(v8::Local<'s, v8::ArrayBuffer>, Range<usize>), v8::DataError> {
  if let Ok(view) = v8::Local::<v8::ArrayBufferView>::try_from(value) {
    let (offset, len) = (view.byte_offset(), view.byte_length());
    let buffer = view.buffer(scope).ok_or(v8::DataError::NoData {
      expected: "view to have a buffer",
    })?;
    let buffer = v8::Local::new(scope, buffer); // recreate handle to avoid lifetime issues
    return Ok((buffer, offset..offset + len));
  }
  let b: v8::Local<v8::ArrayBuffer> = value.try_into()?;
  let b = v8::Local::new(scope, b); // recreate handle to avoid lifetime issues
  Ok((b, 0..b.byte_length()))
}

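// For example (hypothetical JS values): a `new Uint8Array(buf, 8, 16)` view
// resolves to its backing ArrayBuffer with range 8..24, while passing an
// ArrayBuffer of byteLength 32 directly resolves to range 0..32.
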
impl FromV8 for V8Slice {
  fn from_v8(
    scope: &mut v8::HandleScope,
    value: v8::Local<v8::Value>,
  ) -> Result<Self, crate::Error> {
    to_ranged_buffer(scope, value)
      .and_then(|(b, r)| Self::from_buffer(b, r))
      .map_err(|_| crate::Error::ExpectedBuffer)
  }
}

impl Deref for V8Slice {
  type Target = [u8];
  fn deref(&self) -> &[u8] {
    self.as_slice()
  }
}

impl DerefMut for V8Slice {
  fn deref_mut(&mut self) -> &mut [u8] {
    self.as_slice_mut()
  }
}

impl AsRef<[u8]> for V8Slice {
  fn as_ref(&self) -> &[u8] {
    self.as_slice()
  }
}

impl AsMut<[u8]> for V8Slice {
  fn as_mut(&mut self) -> &mut [u8] {
    self.as_slice_mut()
  }
}

// Implement V8Slice -> bytes::Bytes
impl V8Slice {
  fn rc_into_byte_parts(self: Rc<Self>) -> (*const u8, usize, *mut V8Slice) {
    let (ptr, len) = {
      let slice = self.as_ref();
      (slice.as_ptr(), slice.len())
    };
    let rc_raw = Rc::into_raw(self);
    let data = rc_raw as *mut V8Slice;
    (ptr, len, data)
  }
}

impl From<V8Slice> for bytes::Bytes {
  fn from(v8slice: V8Slice) -> Self {
    let (ptr, len, data) = Rc::new(v8slice).rc_into_byte_parts();
    rawbytes::RawBytes::new_raw(ptr, len, data.cast(), &V8SLICE_VTABLE)
  }
}

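// Usage sketch (illustrative only): once converted, the Bytes value can be
// cloned and sliced freely; every such operation dispatches through the
// vtable below, keeping the Rc<V8Slice> (and thus the V8 BackingStore) alive
// until the last handle is dropped:
//
//   let bytes: bytes::Bytes = v8slice.into();
//   let head = bytes.slice(0..4); // refcounted via v8slice_clone, no copy
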
// NOTE: in the limit we could avoid the extra indirection and use the C++
// shared_ptr directly, but we can't store both the underlying data ptr & ctrl
// ptr... so instead we use a shared Rust ptr (Rc/Arc) that itself controls
// the C++ shared_ptr.
const V8SLICE_VTABLE: rawbytes::Vtable = rawbytes::Vtable {
  clone: v8slice_clone,
  drop: v8slice_drop,
  to_vec: v8slice_to_vec,
};

unsafe fn v8slice_clone(
  data: &rawbytes::AtomicPtr<()>,
  ptr: *const u8,
  len: usize,
) -> bytes::Bytes {
  let rc = Rc::from_raw(*data as *const V8Slice);
  let (_, _, data) = rc.clone().rc_into_byte_parts();
  std::mem::forget(rc);
  // NOTE: `bytes::Bytes` does bounds checking so we trust its ptr, len inputs
  // and must use them to allow cloning Bytes it has sliced
  rawbytes::RawBytes::new_raw(ptr, len, data.cast(), &V8SLICE_VTABLE)
}

unsafe fn v8slice_to_vec(
  data: &rawbytes::AtomicPtr<()>,
  ptr: *const u8,
  len: usize,
) -> Vec<u8> {
  let rc = Rc::from_raw(*data as *const V8Slice);
  // NOTE: `bytes::Bytes` does bounds checking so we trust its ptr, len inputs
  // and must use them to allow cloning Bytes it has sliced.
  // The bytes are copied out rather than assembled into a Vec in place: the
  // buffer is owned by V8's BackingStore, not by Rust's global allocator, so
  // a `Vec::from_raw_parts` over it would free memory with the wrong
  // allocator when the Vec is dropped.
  let vec = std::slice::from_raw_parts(ptr, len).to_vec();
  drop(rc); // release this handle's reference to the V8Slice
  vec
}

unsafe fn v8slice_drop(
  data: &mut rawbytes::AtomicPtr<()>,
  _: *const u8,
  _: usize,
) {
  drop(Rc::from_raw(*data as *const V8Slice))
}
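
// Lifecycle recap (descriptive only): `From<V8Slice> for Bytes` moves one Rc
// strong count into the vtable's data pointer via `Rc::into_raw`;
// `v8slice_clone` adds a count for each cloned Bytes handle, and
// `v8slice_drop` releases one per dropped handle. When the final count is
// released, the V8Slice and its SharedRef<BackingStore> are dropped, allowing
// V8 to garbage-collect the buffer again.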