cpp

Coverage Report

Created: 2022-07-20 01:16

/home/uke/oil/mycpp/gc_heap.cc
Line
Count
Source (jump to first uncovered line)
1
// gc_heap.cc
2
3
#include "gc_heap.h"
4
5
#include <sys/mman.h>  // mprotect()
6
7
using gc_heap::Heap;
8
using gc_heap::Local;
9
using gc_heap::Obj;
10
11
namespace gc_heap {
12
13
GLOBAL_STR(kEmptyString, "");
14
15
Heap gHeap;
16
17
// LayoutForwarded and LayoutFixed aren't real types.  You can cast arbitrary
18
// objs to them to access a HOMOGENEOUS REPRESENTATION useful for garbage
19
// collection.
20
21
class LayoutForwarded : public Obj {
22
 public:
23
  Obj* new_location;  // valid if and only if heap_tag_ == Tag::Forwarded
24
};
25
26
// for Tag::FixedSize
27
class LayoutFixed : public Obj {
28
 public:
29
  Obj* children_[16];  // only the entries denoted in field_mask will be valid
30
};
31
32
91
void Space::Init(int space_size) {
33
91
#if GC_PROTECT
34
91
  void* p = mmap(nullptr, space_size, PROT_READ | PROT_WRITE,
35
91
                 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
36
91
  begin_ = static_cast<char*>(p);
37
#else
38
  begin_ = static_cast<char*>(malloc(space_size));
39
#endif
40
91
  size_ = space_size;
41
42
91
  Clear();
43
91
}
44
45
20
void Space::Free() {
46
20
#if GC_PROTECT
47
20
  Protect();  // There is no way of deallocating I guess
48
#else
49
  free(begin_);
50
#endif
51
20
}
52
53
#if GC_PROTECT
54
378
void Space::Protect() {
55
378
  int m = mprotect(begin_, size_, PROT_NONE);
56
378
  assert(m == 0);
57
378
}
58
59
358
void Space::Unprotect() {
60
358
  int m = mprotect(begin_, size_, PROT_READ | PROT_WRITE);
61
358
  assert(m == 0);
62
358
}
63
#endif
64
65
1.44k
Obj* Heap::Relocate(Obj* obj, Obj* header) {
66
  // Move an object from one space to another.
67
  // If there's no vtable, then obj == header.  Otherwise header points to the
68
  // Obj header, which is right after the vtable.
69
70
1.44k
  switch (header->heap_tag_) {
71
364
  case Tag::Forwarded: {
72
364
    auto f = reinterpret_cast<LayoutForwarded*>(header);
73
364
    return f->new_location;
74
0
  }
75
76
426
  case Tag::Global: {  // e.g. GlobalStr isn't copied or forwarded
77
    // log("*** GLOBAL POINTER");
78
426
    return obj;
79
0
  }
80
81
657
  default: {
82
657
    assert(header->heap_tag_ == Tag::Opaque ||
83
657
           header->heap_tag_ == Tag::FixedSize ||
84
657
           header->heap_tag_ == Tag::Scanned);
85
86
0
    auto new_location = reinterpret_cast<Obj*>(free_);
87
    // Note: if we wanted to save space on ASDL records, we could calculate
88
    // their length from the field_mask here.  How much would it slow down GC?
89
657
    int n = header->obj_len_;
90
657
    assert(n > 0);  // detect common problem
91
0
    memcpy(new_location, obj, n);
92
    // log("memcpy %d bytes from %p -> %p", n, obj, new_location);
93
#if 0
94
    if (obj->heap_tag_ == Tag::Opaque) {
95
      Str* s = static_cast<Str*>(obj);
96
      log("from = %s", s->data_);
97
      Str* s2 = static_cast<Str*>(new_location);
98
      log("to = %s", s2->data_);
99
    }
100
#endif
101
    // aligned() like Heap::Allocate()
102
657
    free_ += aligned(n);
103
104
657
#if GC_DEBUG
105
657
    num_live_objs_++;
106
657
#endif
107
108
657
    auto f = reinterpret_cast<LayoutForwarded*>(header);
109
657
    f->heap_tag_ = Tag::Forwarded;
110
657
    f->new_location = new_location;
111
657
    return new_location;
112
0
  }
113
1.44k
  }  // switch
114
1.44k
}
115
116
2.10k
inline Obj* ObjHeader(Obj* obj) {
117
  // If we see a vtable pointer, return the Obj* header immediately following.
118
  // Otherwise just return Obj itself.
119
2.10k
  return (obj->heap_tag_ & 0x1) == 0
120
2.10k
             ? reinterpret_cast<Obj*>(reinterpret_cast<char*>(obj) +
121
2
                                      sizeof(void*))
122
2.10k
             : obj;
123
2.10k
}
124
125
357
void Heap::Collect() {
126
357
#if GC_DEBUG
127
357
  log("--> COLLECT with %d roots", roots_top_);
128
357
  num_collections_++;
129
357
#endif
130
131
357
#if GC_PROTECT
132
357
  to_space_.Unprotect();
133
357
#endif
134
135
  // If we grew one space, the other one has to catch up.
136
357
  if (to_space_.size_ < from_space_.size_) {
137
9
    to_space_.Free();
138
9
    to_space_.Init(from_space_.size_);
139
9
  }
140
141
357
  char* scan = to_space_.begin_;  // boundary between black and gray
142
357
  free_ = scan;                   // where to copy new entries
143
357
  limit_ = scan + to_space_.size_;
144
145
357
#if GC_DEBUG
146
357
  num_live_objs_ = 0;
147
357
#endif
148
149
1.84k
  for (int i = 0; i < roots_top_; ++i) {
150
1.48k
    Obj** handle = roots_[i];
151
1.48k
    auto root = *handle;
152
#if GC_VERBOSE
153
    log("%d. handle %p", i, handle);
154
    log("    root %p", root);
155
#endif
156
157
1.48k
    if (root) {  // could be nullptr
158
1.15k
      auto header = ObjHeader(root);
159
160
      // This updates the underlying Str/List/Dict with a forwarding pointer,
161
      // i.e. for other objects that are pointing to it
162
1.15k
      Obj* new_location = Relocate(root, header);
163
#if TODO_BUG
164
      for (int j = 0; j < roots_top_; ++j) {
165
        Obj** handle2 = roots_[j];
166
        auto root2 = *handle2;
167
        if (root2) {
168
          switch (root2->heap_tag_) {
169
          case Tag::Forwarded:
170
          case Tag::Global:
171
          case Tag::Opaque:
172
          case Tag::FixedSize:
173
          case Tag::Scanned:
174
            break;
175
          default:
176
            assert(0);  // NOTE(Jesse): Pretty sure this is InvalidCodePath();
177
          }
178
        }
179
      }
180
181
#endif
182
183
      // log("    new location %p", new_location);
184
185
      // This update is for the "double indirection", so future accesses to a
186
      // local variable use the new location
187
1.15k
      *handle = new_location;
188
1.15k
    }
189
1.48k
  }
190
191
1.01k
  while (scan < free_) {
192
657
    auto obj = reinterpret_cast<Obj*>(scan);
193
657
    auto header = ObjHeader(obj);
194
195
657
    switch (header->heap_tag_) {
196
120
    case Tag::FixedSize: {
197
120
      auto fixed = reinterpret_cast<LayoutFixed*>(header);
198
120
      int mask = fixed->field_mask_;
199
2.04k
      for (int i = 0; i < 16; ++i) {
200
1.92k
        if (mask & (1 << i)) {
201
142
          Obj* child = fixed->children_[i];
202
          // log("i = %d, p = %p, heap_tag = %d", i, child, child->heap_tag_);
203
142
          if (child) {
204
142
            auto child_header = ObjHeader(child);
205
            // log("  fixed: child %d from %p", i, child);
206
142
            fixed->children_[i] = Relocate(child, child_header);
207
            // log("  to %p", fixed->children_[i]);
208
142
          }
209
142
        }
210
1.92k
      }
211
120
      break;
212
0
    }
213
11
    case Tag::Scanned: {
214
11
      assert(header == obj);  // no inheritance
215
0
      auto slab = reinterpret_cast<Slab<void*>*>(header);
216
11
      int n = (slab->obj_len_ - kSlabHeaderSize) / sizeof(void*);
217
184
      for (int i = 0; i < n; ++i) {
218
173
        Obj* child = reinterpret_cast<Obj*>(slab->items_[i]);
219
173
        if (child) {  // note: List<> may have nullptr; Dict is sparse
220
148
          auto child_header = ObjHeader(child);
221
148
          slab->items_[i] = Relocate(child, child_header);
222
148
        }
223
173
      }
224
11
      break;
225
0
    }
226
526
    default: {
227
526
      assert(header->heap_tag_ == Tag::Forwarded ||
228
526
             header->heap_tag_ == Tag::Global ||
229
526
             header->heap_tag_ == Tag::Opaque);
230
526
    }
231
232
      // other tags like Tag::Opaque have no children
233
657
    }
234
    // aligned() like Heap::Allocate()
235
657
    scan += aligned(header->obj_len_);
236
657
  }
237
238
  // We just copied everything from_space_ -> to_space_.  Maintain
239
  // invariant of the space we will allocate from next time.
240
357
  from_space_.Clear();
241
357
#if GC_PROTECT
242
357
  from_space_.Protect();
243
  // log("begin = %x", *from_space_.begin_);
244
357
#endif
245
246
357
  Swap();
247
248
#if GC_VERBOSE
249
  Report();
250
#endif
251
357
}
252
253
1.04k
bool str_equals(Str* left, Str* right) {
254
  // Fast path for identical strings.  String deduplication during GC could
255
  // make this more likely.  String interning could guarantee it, allowing us
256
  // to remove memcmp().
257
1.04k
  if (left == right) {
258
7
    return true;
259
7
  }
260
  // obj_len_ equal implies string lengths are equal
261
1.04k
  if (left->obj_len_ == right->obj_len_) {
262
258
    return memcmp(left->data_, right->data_, len(left)) == 0;
263
258
  }
264
783
  return false;
265
1.04k
}
266
267
4
bool maybe_str_equals(Str* left, Str* right) {
268
4
  if (left && right) {
269
2
    return str_equals(left, right);
270
2
  }
271
272
2
  if (!left && !right) {
273
0
    return true;  // None == None
274
0
  }
275
276
2
  return false;  // one is None and one is a Str*
277
2
}
278
279
#if GC_DEBUG
280
2
void ShowFixedChildren(Obj* obj) {
281
2
  assert(obj->heap_tag_ == Tag::FixedSize);
282
0
  auto fixed = reinterpret_cast<LayoutFixed*>(obj);
283
2
  log("MASK:");
284
285
  // Note: can this be optimized with the equivalent x & (x-1) trick?
286
  // We need the index
287
  // There is a de Bruijn sequence solution?
288
  // https://stackoverflow.com/questions/757059/position-of-least-significant-bit-that-is-set
289
290
2
  int mask = fixed->field_mask_;
291
34
  for (int i = 0; i < 16; ++i) {
292
32
    if (mask & (1 << i)) {
293
4
      Obj* child = fixed->children_[i];
294
4
      if (child) {
295
        // make sure we get Tag::Opaque, Tag::Scanned, etc.
296
4
        log("i = %d, p = %p, heap_tag = %d", i, child, child->heap_tag_);
297
4
      }
298
4
    }
299
32
  }
300
2
}
301
#endif
302
303
}  // namespace gc_heap