Commit bddeb2b1 authored by Behdad Esfahbod

Minor renamings of internal inline functions

Parent d652ef29
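
For reference, the hunks below boil down to the following renames (summarized from the diff itself), followed by an illustrative before/after call site; the count/size arguments are placeholders, not code from this commit:

/* Renames in this commit (old name -> new name):
 *   _hb_popcount                   -> hb_popcount
 *   _hb_bit_storage                -> hb_bit_storage
 *   _hb_ctz                        -> hb_ctz
 *   _hb_unsigned_int_mul_overflows -> hb_unsigned_mul_overflows
 *   _hb_ceil_to_4                  -> hb_ceil_to_4
 *   _hb_ispow2                     -> hb_ispow2
 */

/* Before */
if (unlikely (_hb_unsigned_int_mul_overflows (count, size)))
  return false;

/* After: same semantics, new name */
if (unlikely (hb_unsigned_mul_overflows (count, size)))
  return false;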
@@ -124,14 +124,14 @@ hb_buffer_t::enlarge (unsigned int size)
hb_glyph_info_t *new_info = nullptr;
bool separate_out = out_info != info;
-if (unlikely (_hb_unsigned_int_mul_overflows (size, sizeof (info[0]))))
+if (unlikely (hb_unsigned_mul_overflows (size, sizeof (info[0]))))
goto done;
while (size >= new_allocated)
new_allocated += (new_allocated >> 1) + 32;
static_assert ((sizeof (info[0]) == sizeof (pos[0])), "");
-if (unlikely (_hb_unsigned_int_mul_overflows (new_allocated, sizeof (info[0]))))
+if (unlikely (hb_unsigned_mul_overflows (new_allocated, sizeof (info[0]))))
goto done;
new_pos = (hb_glyph_position_t *) realloc (pos, new_allocated * sizeof (pos[0]));
......
@@ -278,7 +278,7 @@ struct hb_vector_t
}
else
{
-bool overflows = (new_allocated < allocated) || _hb_unsigned_int_mul_overflows (new_allocated, sizeof (Type));
+bool overflows = (new_allocated < allocated) || hb_unsigned_mul_overflows (new_allocated, sizeof (Type));
if (likely (!overflows))
new_array = (Type *) realloc (arrayZ, new_allocated * sizeof (Type));
}
......
@@ -89,7 +89,7 @@ struct hb_map_t
{
if (unlikely (!successful)) return false;
-unsigned int power = _hb_bit_storage (population * 2 + 8);
+unsigned int power = hb_bit_storage (population * 2 + 8);
unsigned int new_size = 1u << power;
item_t *new_items = (item_t *) malloc ((size_t) new_size * sizeof (item_t));
if (unlikely (!new_items))
......
@@ -230,7 +230,7 @@ struct hb_sanitize_context_t :
inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
{
const char *p = (const char *) base;
-bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);
+bool overflows = hb_unsigned_mul_overflows (len, record_size);
unsigned int array_size = record_size * len;
bool ok = !overflows && this->check_range (base, array_size);
@@ -1178,7 +1178,7 @@ struct BinSearchHeader
{
len.set (v);
assert (len == v);
-entrySelector.set (MAX (1u, _hb_bit_storage (v)) - 1);
+entrySelector.set (MAX (1u, hb_bit_storage (v)) - 1);
searchRange.set (16 * (1u << entrySelector));
rangeShift.set (v * 16 > searchRange
? 16 * v - searchRange
......
@@ -89,7 +89,7 @@ struct CmapSubtableFormat4
this->length.set (get_sub_table_size (segments));
this->segCountX2.set (segments.len * 2);
-this->entrySelector.set (MAX (1u, _hb_bit_storage (segments.len)) - 1);
+this->entrySelector.set (MAX (1u, hb_bit_storage (segments.len)) - 1);
this->searchRange.set (2 * (1u << this->entrySelector));
this->rangeShift.set (segments.len * 2 > this->searchRange
? 2 * segments.len - this->searchRange
......
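
A quick worked example (hypothetical values, not part of the commit) of the binary-search header arithmetic in the two hunks above, for a table of 10 entries of 16 bytes each; the cmap format 4 hunk applies the same pattern with a unit of 2 instead of 16:

#include <assert.h>

int main (void)
{
  /* Hypothetical input: 10 entries, 16 bytes per entry. */
  unsigned int len = 10;
  unsigned int storage = 4;                       /* hb_bit_storage (10): 10 == 0b1010 */
  unsigned int entrySelector = (storage > 1 ? storage : 1) - 1;
  unsigned int searchRange = 16 * (1u << entrySelector);
  unsigned int rangeShift = len * 16 > searchRange ? 16 * len - searchRange : 0;

  assert (entrySelector == 3);   /* largest power of two not exceeding len is 2^3 */
  assert (searchRange == 128);   /* 16 * 2^entrySelector */
  assert (rangeShift == 32);     /* 16 * len - searchRange */
  return 0;
}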
@@ -199,7 +199,7 @@ struct hdmx
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && version == 0 &&
-!_hb_unsigned_int_mul_overflows (num_records, size_device_record) &&
+!hb_unsigned_mul_overflows (num_records, size_device_record) &&
size_device_record >= DeviceRecord::min_size &&
c->check_range (this, get_size()));
}
......
@@ -99,7 +99,7 @@ struct ValueFormat : HBUINT16
#endif
inline unsigned int get_len (void) const
-{ return _hb_popcount ((unsigned int) *this); }
+{ return hb_popcount ((unsigned int) *this); }
inline unsigned int get_size (void) const
{ return get_len () * Value::static_size; }
@@ -374,7 +374,7 @@ struct AnchorMatrix
{
TRACE_SANITIZE (this);
if (!c->check_struct (this)) return_trace (false);
-if (unlikely (_hb_unsigned_int_mul_overflows (rows, cols))) return_trace (false);
+if (unlikely (hb_unsigned_mul_overflows (rows, cols))) return_trace (false);
unsigned int count = rows * cols;
if (!c->check_array (matrixZ, matrixZ[0].static_size, count)) return_trace (false);
for (unsigned int i = 0; i < count; i++)
......
@@ -511,7 +511,7 @@ struct AlternateSubstFormat1
hb_mask_t lookup_mask = c->lookup_mask;
/* Note: This breaks badly if two features enabled this lookup together. */
-unsigned int shift = _hb_ctz (lookup_mask);
+unsigned int shift = hb_ctz (lookup_mask);
unsigned int alt_index = ((lookup_mask & glyph_mask) >> shift);
if (unlikely (alt_index > alt_set.len || alt_index == 0)) return_trace (false);
......
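
A brief worked example (hypothetical mask values, not from the commit) of how the alternate index is extracted in the hunk above:

/* Suppose this lookup was assigned mask bits 0x70 and the glyph carries
 * glyph_mask 0x30 within those bits:
 *   shift     = hb_ctz (0x70)          == 4
 *   alt_index = (0x70 & 0x30) >> shift == 0x30 >> 4 == 3
 * so the feature value stored in the mask selects alternate number 3. */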
@@ -145,7 +145,7 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
{
static_assert ((!(HB_GLYPH_FLAG_DEFINED & (HB_GLYPH_FLAG_DEFINED + 1))), "");
unsigned int global_bit_mask = HB_GLYPH_FLAG_DEFINED + 1;
-unsigned int global_bit_shift = _hb_popcount (HB_GLYPH_FLAG_DEFINED);
+unsigned int global_bit_shift = hb_popcount (HB_GLYPH_FLAG_DEFINED);
m.global_mask = global_bit_mask;
@@ -209,7 +209,7 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
bits_needed = 0;
else
/* Limit to 8 bits per feature. */
-bits_needed = MIN(8u, _hb_bit_storage (info->max_value));
+bits_needed = MIN(8u, hb_bit_storage (info->max_value));
if (!info->max_value || next_bit + bits_needed > 8 * sizeof (hb_mask_t))
continue; /* Feature disabled, or not enough bits. */
......
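
For illustration only (a hypothetical flag value, not taken from the commit): if HB_GLYPH_FLAG_DEFINED were 0x3, the static_assert above would hold, since 0x3 & 0x4 == 0, and:

/* global_bit_mask  = HB_GLYPH_FLAG_DEFINED + 1           == 0x4
 * global_bit_shift = hb_popcount (HB_GLYPH_FLAG_DEFINED) == 2
 * i.e. the global bit sits just above the publicly defined glyph-flag bits. */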
@@ -829,12 +829,12 @@ hb_ot_shape_internal (hb_ot_shape_context_t *c)
{
c->buffer->deallocate_var_all ();
c->buffer->scratch_flags = HB_BUFFER_SCRATCH_FLAG_DEFAULT;
-if (likely (!_hb_unsigned_int_mul_overflows (c->buffer->len, HB_BUFFER_MAX_LEN_FACTOR)))
+if (likely (!hb_unsigned_mul_overflows (c->buffer->len, HB_BUFFER_MAX_LEN_FACTOR)))
{
c->buffer->max_len = MAX (c->buffer->len * HB_BUFFER_MAX_LEN_FACTOR,
(unsigned) HB_BUFFER_MAX_LEN_MIN);
}
-if (likely (!_hb_unsigned_int_mul_overflows (c->buffer->len, HB_BUFFER_MAX_OPS_FACTOR)))
+if (likely (!hb_unsigned_mul_overflows (c->buffer->len, HB_BUFFER_MAX_OPS_FACTOR)))
{
c->buffer->max_ops = MAX (c->buffer->len * HB_BUFFER_MAX_OPS_FACTOR,
(unsigned) HB_BUFFER_MAX_OPS_MIN);
......
@@ -362,7 +362,7 @@ typedef const struct _hb_void_t *hb_void_t;
/* Return the number of 1 bits in v. */
template <typename T>
static inline HB_CONST_FUNC unsigned int
-_hb_popcount (T v)
+hb_popcount (T v)
{
#if (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) && defined(__OPTIMIZE__)
if (sizeof (T) <= sizeof (unsigned int))
@@ -387,13 +387,13 @@ _hb_popcount (T v)
if (sizeof (T) == 8)
{
unsigned int shift = 32;
-return _hb_popcount<uint32_t> ((uint32_t) v) + _hb_popcount ((uint32_t) (v >> shift));
+return hb_popcount<uint32_t> ((uint32_t) v) + hb_popcount ((uint32_t) (v >> shift));
}
if (sizeof (T) == 16)
{
unsigned int shift = 64;
-return _hb_popcount<uint64_t> ((uint64_t) v) + _hb_popcount ((uint64_t) (v >> shift));
+return hb_popcount<uint64_t> ((uint64_t) v) + hb_popcount ((uint64_t) (v >> shift));
}
assert (0);
@@ -403,7 +403,7 @@ _hb_popcount (T v)
/* Returns the number of bits needed to store number */
template <typename T>
static inline HB_CONST_FUNC unsigned int
-_hb_bit_storage (T v)
+hb_bit_storage (T v)
{
if (unlikely (!v)) return 0;
@@ -466,8 +466,8 @@ _hb_bit_storage (T v)
if (sizeof (T) == 16)
{
unsigned int shift = 64;
-return (v >> shift) ? _hb_bit_storage<uint64_t> ((uint64_t) (v >> shift)) + shift :
-_hb_bit_storage<uint64_t> ((uint64_t) v);
+return (v >> shift) ? hb_bit_storage<uint64_t> ((uint64_t) (v >> shift)) + shift :
+hb_bit_storage<uint64_t> ((uint64_t) v);
}
assert (0);
@@ -477,7 +477,7 @@ _hb_bit_storage (T v)
/* Returns the number of zero bits in the least significant side of v */
template <typename T>
static inline HB_CONST_FUNC unsigned int
-_hb_ctz (T v)
+hb_ctz (T v)
{
if (unlikely (!v)) return 0;
@@ -539,8 +539,8 @@ _hb_ctz (T v)
if (sizeof (T) == 16)
{
unsigned int shift = 64;
-return (uint64_t) v ? _hb_bit_storage<uint64_t> ((uint64_t) v) :
-_hb_bit_storage<uint64_t> ((uint64_t) v >> shift) + shift;
+return (uint64_t) v ? hb_bit_storage<uint64_t> ((uint64_t) v) :
+hb_bit_storage<uint64_t> ((uint64_t) v >> shift) + shift;
}
assert (0);
@@ -548,18 +548,19 @@ _hb_ctz (T v)
}
static inline bool
-_hb_unsigned_int_mul_overflows (unsigned int count, unsigned int size)
+hb_unsigned_mul_overflows (unsigned int count, unsigned int size)
{
return (size > 0) && (count >= ((unsigned int) -1) / size);
}
static inline unsigned int
-_hb_ceil_to_4 (unsigned int v)
+hb_ceil_to_4 (unsigned int v)
{
return ((v - 1) | 3) + 1;
}
-static inline bool _hb_ispow2 (unsigned int v)
+static inline bool
+hb_ispow2 (unsigned int v)
{
return 0 == (v & (v - 1));
}
@@ -925,7 +926,7 @@ _hb_round (double x)
static inline int
_hb_memalign(void **memptr, size_t alignment, size_t size)
{
-if (unlikely (!_hb_ispow2 (alignment) ||
+if (unlikely (!hb_ispow2 (alignment) ||
!alignment ||
0 != (alignment & (sizeof (void *) - 1))))
return EINVAL;
......
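
As a standalone illustration of what the renamed helpers above compute, here is a minimal sketch with naive portable fallbacks; the real implementations above prefer compiler builtins where available, and none of the code below is part of the commit:

#include <assert.h>
#include <limits.h>

/* Naive stand-ins for the renamed helpers, for illustration only. */
static unsigned int popcount (unsigned int v)     /* hb_popcount: number of 1 bits */
{ unsigned int n = 0; while (v) { n += v & 1u; v >>= 1; } return n; }

static unsigned int bit_storage (unsigned int v)  /* hb_bit_storage: bits needed to store v */
{ unsigned int n = 0; while (v) { n++; v >>= 1; } return n; }

static unsigned int ctz (unsigned int v)          /* hb_ctz: trailing zero bits (0 for v == 0) */
{ unsigned int n = 0; if (!v) return 0; while (!(v & 1u)) { n++; v >>= 1; } return n; }

static int mul_overflows (unsigned int count, unsigned int size)  /* hb_unsigned_mul_overflows */
{ return (size > 0) && (count >= ((unsigned int) -1) / size); }

static unsigned int ceil_to_4 (unsigned int v)    /* hb_ceil_to_4: round up to a multiple of 4 */
{ return ((v - 1) | 3) + 1; }

static int ispow2 (unsigned int v)                /* hb_ispow2: power-of-two test */
{ return 0 == (v & (v - 1)); }

int main (void)
{
  assert (popcount (0xF0u) == 4);              /* 0b11110000 has four 1 bits */
  assert (bit_storage (1000u) == 10);          /* 512 <= 1000 < 1024, so 10 bits */
  assert (ctz (0x28u) == 3);                   /* 0b101000 has three trailing zeros */
  assert (!mul_overflows (1000u, 4u));         /* 4000 fits in unsigned int */
  assert (mul_overflows (UINT_MAX / 2u, 4u));  /* the product would wrap */
  assert (ceil_to_4 (5u) == 8u && ceil_to_4 (8u) == 8u);
  assert (ispow2 (64u) && !ispow2 (65u));
  return 0;
}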
@@ -94,7 +94,7 @@ struct hb_set_t
{
unsigned int pop = 0;
for (unsigned int i = 0; i < len (); i++)
-pop += _hb_popcount (v[i]);
+pop += hb_popcount (v[i]);
return pop;
}
@@ -161,8 +161,8 @@ struct hb_set_t
static const unsigned int PAGE_BITS = 512;
static_assert ((PAGE_BITS & ((PAGE_BITS) - 1)) == 0, "");
-static inline unsigned int elt_get_min (const elt_t &elt) { return _hb_ctz (elt); }
-static inline unsigned int elt_get_max (const elt_t &elt) { return _hb_bit_storage (elt) - 1; }
+static inline unsigned int elt_get_min (const elt_t &elt) { return hb_ctz (elt); }
+static inline unsigned int elt_get_max (const elt_t &elt) { return hb_bit_storage (elt) - 1; }
typedef hb_vector_size_t<elt_t, PAGE_BITS / 8> vector_t;
......
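
A small worked example (assumed bit pattern, not from the commit) of the element min/max logic above:

/* For a page element with bit pattern 0b1101000 (0x68):
 *   hb_ctz (0x68)             == 3   -> elt_get_min: lowest set bit is bit 3
 *   hb_bit_storage (0x68) - 1 == 6   -> elt_get_max: highest set bit is bit 6 */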
@@ -154,7 +154,7 @@ _hb_subset_face_data_reference_blob (hb_subset_face_data_t *data)
unsigned int face_length = table_count * 16 + 12;
for (unsigned int i = 0; i < table_count; i++)
-face_length += _hb_ceil_to_4 (hb_blob_get_length (data->tables.arrayZ[i].blob));
+face_length += hb_ceil_to_4 (hb_blob_get_length (data->tables.arrayZ[i].blob));
char *buf = (char *) malloc (face_length);
if (unlikely (!buf))
......