Commit 95df00ae authored by Behdad Esfahbod

Hide a few static methods

It looks like static methods that do not get inlined end up being
exported.  We have a lot more of these; they all need to be protected
at some point.  I wish there were an easier way, like the visibility
flag we pass that automatically hides all inline methods.

Was exposed by check-symbols.sh when compiling on OS X 10.14 with:

$ make CPPFLAGS=-Oz CXXFLAGS=-flto=thin LDFLAGS=-lc++
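
The fix annotates the affected methods with HB_INTERNAL.  The sketch below is
illustrative only: the actual hb.hh definition of HB_INTERNAL is not part of
this diff, it is assumed here to expand to the GCC/Clang hidden-visibility
attribute, and example_t is a made-up type.

// Illustrative sketch only -- not the actual hb.hh definition of HB_INTERNAL.
// On GCC/Clang targets that support symbol visibility, such a macro is
// typically defined along these lines:
#ifndef HB_INTERNAL
#define HB_INTERNAL __attribute__((__visibility__("hidden")))
#endif

struct example_t              /* hypothetical type, for illustration only */
{
  /* If the compiler chooses not to inline this static member (e.g. because
   * its address is taken for qsort/bsearch), an unannotated definition gets
   * an external symbol and shows up in the library's export table.  The
   * visibility attribute keeps it out of the dynamic symbol table. */
  HB_INTERNAL static int cmp (const void *pa, const void *pb)
  { return *(const int *) pa - *(const int *) pb; }
};

check-symbols.sh itself is not shown here; listing the built library's
external symbols with nm -g and checking that everything left is an intended
hb_* entry point is roughly the kind of check such a script performs.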
Parent 2f4be4ba
@@ -39,7 +39,7 @@ struct hb_aat_feature_mapping_t
   hb_aat_layout_feature_selector_t selectorToEnable;
   hb_aat_layout_feature_selector_t selectorToDisable;
-  static int cmp (const void *key_, const void *entry_)
+  HB_INTERNAL static int cmp (const void *key_, const void *entry_)
   {
     hb_tag_t key = * (unsigned int *) key_;
     const hb_aat_feature_mapping_t * entry = (const hb_aat_feature_mapping_t *) entry_;
@@ -66,7 +66,7 @@ struct hb_aat_map_builder_t
   hb_aat_layout_feature_selector_t setting;
   unsigned seq; /* For stable sorting only. */
-  static int cmp (const void *pa, const void *pb)
+  HB_INTERNAL static int cmp (const void *pa, const void *pb)
   {
     const feature_info_t *a = (const feature_info_t *) pa;
     const feature_info_t *b = (const feature_info_t *) pb;
@@ -100,7 +100,7 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
       return (int) a.length - (int) length;
     return hb_memcmp (a.arrayZ, arrayZ, get_size ());
   }
-  static int cmp (const void *pa, const void *pb)
+  HB_INTERNAL static int cmp (const void *pa, const void *pb)
   {
     hb_array_t<Type> *a = (hb_array_t<Type> *) pa;
     hb_array_t<Type> *b = (hb_array_t<Type> *) pb;
@@ -410,7 +410,7 @@ struct active_feature_t {
   feature_record_t rec;
   unsigned int order;
-  static int cmp (const void *pa, const void *pb) {
+  HB_INTERNAL static int cmp (const void *pa, const void *pb) {
     const active_feature_t *a = (const active_feature_t *) pa;
     const active_feature_t *b = (const active_feature_t *) pb;
     return a->rec.feature < b->rec.feature ? -1 : a->rec.feature > b->rec.feature ? 1 :
@@ -428,7 +428,7 @@ struct feature_event_t {
   bool start;
   active_feature_t feature;
-  static int cmp (const void *pa, const void *pb) {
+  HB_INTERNAL static int cmp (const void *pa, const void *pb) {
     const feature_event_t *a = (const feature_event_t *) pa;
     const feature_event_t *b = (const feature_event_t *) pb;
     return a->index < b->index ? -1 : a->index > b->index ? 1 :
@@ -56,7 +56,7 @@ typedef struct TableRecord
 {
   int cmp (Tag t) const { return -t.cmp (tag); }
-  static int cmp (const void *pa, const void *pb)
+  HB_INTERNAL static int cmp (const void *pa, const void *pb)
   {
     const TableRecord *a = (const TableRecord *) pa;
     const TableRecord *b = (const TableRecord *) pb;
@@ -63,7 +63,8 @@ struct IntType
   operator wide_type () const { return v; }
   bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
   bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
-  static int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
+  HB_INTERNAL static int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b)
+  { return b->cmp (*a); }
   template <typename Type2>
   int cmp (Type2 a) const
   {
@@ -285,7 +285,7 @@ struct CmapSubtableFormat4
     *glyph = gid;
     return true;
   }
-  static bool get_glyph_func (const void *obj, hb_codepoint_t codepoint, hb_codepoint_t *glyph)
+  HB_INTERNAL static bool get_glyph_func (const void *obj, hb_codepoint_t codepoint, hb_codepoint_t *glyph)
   {
     return ((const accelerator_t *) obj)->get_glyph (codepoint, glyph);
   }
@@ -1096,18 +1096,18 @@ struct cmap
                               hb_codepoint_t *glyph);
   template <typename Type>
-  static bool get_glyph_from (const void *obj,
-                              hb_codepoint_t codepoint,
-                              hb_codepoint_t *glyph)
+  HB_INTERNAL static bool get_glyph_from (const void *obj,
+                                          hb_codepoint_t codepoint,
+                                          hb_codepoint_t *glyph)
   {
     const Type *typed_obj = (const Type *) obj;
     return typed_obj->get_glyph (codepoint, glyph);
   }
   template <typename Type>
-  static bool get_glyph_from_symbol (const void *obj,
-                                     hb_codepoint_t codepoint,
-                                     hb_codepoint_t *glyph)
+  HB_INTERNAL static bool get_glyph_from_symbol (const void *obj,
+                                                 hb_codepoint_t codepoint,
+                                                 hb_codepoint_t *glyph)
   {
     const Type *typed_obj = (const Type *) obj;
     if (likely (typed_obj->get_glyph (codepoint, glyph)))
@@ -153,7 +153,7 @@ struct BaseCoord
 struct FeatMinMaxRecord
 {
-  static int cmp (const void *key_, const void *entry_)
+  HB_INTERNAL static int cmp (const void *key_, const void *entry_)
   {
     hb_tag_t key = * (hb_tag_t *) key_;
     const FeatMinMaxRecord &entry = * (const FeatMinMaxRecord *) entry_;
@@ -271,7 +271,7 @@ struct BaseValues
 struct BaseLangSysRecord
 {
-  static int cmp (const void *key_, const void *entry_)
+  HB_INTERNAL static int cmp (const void *key_, const void *entry_)
   {
     hb_tag_t key = * (hb_tag_t *) key_;
     const BaseLangSysRecord &entry = * (const BaseLangSysRecord *) entry_;
@@ -345,7 +345,7 @@ struct BaseScript
 struct BaseScriptList;
 struct BaseScriptRecord
 {
-  static int cmp (const void *key_, const void *entry_)
+  HB_INTERNAL static int cmp (const void *key_, const void *entry_)
   {
     hb_tag_t key = * (hb_tag_t *) key_;
     const BaseScriptRecord &entry = * (const BaseScriptRecord *) entry_;
@@ -173,15 +173,15 @@ struct ValueFormat : HBUINT16
     return true;
   }
-  static OffsetTo<Device>& get_device (Value* value)
+  HB_INTERNAL static OffsetTo<Device>& get_device (Value* value)
   { return *CastP<OffsetTo<Device> > (value); }
-  static const OffsetTo<Device>& get_device (const Value* value, bool *worked=nullptr)
+  HB_INTERNAL static const OffsetTo<Device>& get_device (const Value* value, bool *worked=nullptr)
   {
     if (worked) *worked |= bool (*value);
     return *CastP<OffsetTo<Device> > (value);
   }
-  static const HBINT16& get_short (const Value* value, bool *worked=nullptr)
+  HB_INTERNAL static const HBINT16& get_short (const Value* value, bool *worked=nullptr)
   {
     if (worked) *worked |= bool (*value);
     return *CastP<HBINT16> (value);
@@ -1576,7 +1576,7 @@ struct PosLookup : Lookup
     dispatch (&c);
   }
-  static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
+  HB_INTERNAL static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
   template <typename context_t>
   static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
@@ -1194,7 +1194,7 @@ struct SubstLookup : Lookup
   const SubTable& get_subtable (unsigned int i) const
   { return Lookup::get_subtable<SubTable> (i); }
-  static bool lookup_type_is_reverse (unsigned int lookup_type)
+  HB_INTERNAL static bool lookup_type_is_reverse (unsigned int lookup_type)
   { return lookup_type == SubTable::ReverseChainSingle; }
   bool is_reverse () const
@@ -1252,7 +1252,7 @@ struct SubstLookup : Lookup
     return dispatch (c);
   }
-  static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
+  HB_INTERNAL static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
   SubTable& serialize_subtable (hb_serialize_context_t *c,
                                 unsigned int i)
@@ -1315,9 +1315,9 @@ struct SubstLookup : Lookup
   }
   template <typename context_t>
-  static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
+  HB_INTERNAL static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
-  static hb_closure_context_t::return_t dispatch_closure_recurse_func (hb_closure_context_t *c, unsigned int lookup_index)
+  HB_INTERNAL static hb_closure_context_t::return_t dispatch_closure_recurse_func (hb_closure_context_t *c, unsigned int lookup_index)
   {
     if (!c->should_visit_lookup (lookup_index))
       return hb_void_t ();
@@ -613,7 +613,7 @@ struct hb_get_subtables_context_t :
        hb_dispatch_context_t<hb_get_subtables_context_t, hb_void_t, HB_DEBUG_APPLY>
 {
   template <typename Type>
-  static bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
+  HB_INTERNAL static bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
   {
     const Type *typed_obj = (const Type *) obj;
     return typed_obj->apply (c);
@@ -68,7 +68,7 @@ struct hb_ot_map_t
   unsigned short random : 1;
   hb_mask_t mask;
-  static int cmp (const void *pa, const void *pb)
+  HB_INTERNAL static int cmp (const void *pa, const void *pb)
   {
     const lookup_map_t *a = (const lookup_map_t *) pa;
     const lookup_map_t *b = (const lookup_map_t *) pb;
@@ -247,7 +247,7 @@ struct hb_ot_map_builder_t
   unsigned int default_value; /* for non-global features, what should the unset glyphs take */
   unsigned int stage[2]; /* GSUB/GPOS */
-  static int cmp (const void *pa, const void *pb)
+  HB_INTERNAL static int cmp (const void *pa, const void *pb)
   {
     const feature_info_t *a = (const feature_info_t *) pa;
     const feature_info_t *b = (const feature_info_t *) pb;
@@ -283,7 +283,7 @@ struct active_feature_t {
   OPENTYPE_FEATURE_RECORD rec;
   unsigned int order;
-  static int cmp (const void *pa, const void *pb) {
+  HB_INTERNAL static int cmp (const void *pa, const void *pb) {
     const active_feature_t *a = (const active_feature_t *) pa;
     const active_feature_t *b = (const active_feature_t *) pb;
     return a->rec.tagFeature < b->rec.tagFeature ? -1 : a->rec.tagFeature > b->rec.tagFeature ? 1 :
@@ -300,7 +300,7 @@ struct feature_event_t {
   bool start;
   active_feature_t feature;
-  static int cmp (const void *pa, const void *pb)
+  HB_INTERNAL static int cmp (const void *pa, const void *pb)
   {
     const feature_event_t *a = (const feature_event_t *) pa;
     const feature_event_t *b = (const feature_event_t *) pb;