#include <lunaix/mm/valloc.h>
#include <lunaix/syslog.h>
#include <lunaix/owloysius.h>

static morph_t* devtree_obj_root;
static struct dt_context dtctx;
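
/*
 * Map a flattened devicetree blob at `base`: validate the header magic and
 * record the offsets of the memory reservation block, the strings block
 * and the structure block.
 */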
fdt_load(struct fdt_blob* fdt, ptr_t base)
    struct fdt_header* hdr;

    if (hdr->magic != FDT_MAGIC) {
        FATAL("invalid dtb, unexpected magic: 0x%x, expected: 0x%x",
              hdr->magic, FDT_MAGIC);

    fdt->plat_rsvd_base = base + hdr->off_mem_rsvmap;
    fdt->str_block_base = base + hdr->off_dt_strings;
    fdt->root.ptr = base + hdr->off_dt_struct;
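
/*
 * Iterate the boot memory reservation block. Per the devicetree spec the
 * block is terminated by an entry whose address and size are both zero,
 * which is what the sentinel check below relies on.
 */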
fdt_next_boot_rsvdmem(struct fdt_blob* fdt, fdt_loc_t* loc,
                      struct dt_memory_node* mem)
    if (!current.rsvd_ent->addr && !current.rsvd_ent->size) {

    mem->base = current.rsvd_ent->addr;
    mem->size = current.rsvd_ent->size;
    mem->type = FDT_MEM_RSVD;
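
/*
 * Advance to the next token in the structure block, skipping NOP tokens;
 * node names and property values are padded, hence the ROUNDUP back to a
 * 32-bit boundary after each. The change in nesting depth is reported
 * through delta_depth.
 */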
fdt_next_token(fdt_loc_t loc, int* delta_depth)
        if (fdt_node(loc.token)) {
            loc.ptr += strlen(loc.node->name) + 1;
            loc.ptr = ROUNDUP(loc.ptr, sizeof(int));
        else if (fdt_node_end(loc.token)) {
        else if (fdt_prop(loc.token)) {
            loc.ptr += loc.prop->len + 2 * sizeof(int);
            loc.ptr = ROUNDUP(loc.ptr, sizeof(int));
    } while (fdt_nope(loc.token));

fdt_next_sibling(fdt_loc_t loc, fdt_loc_t* loc_out)
    int depth = 0, new_depth = 0;
        loc = fdt_next_token(loc, &new_depth);

    return !fdt_node_end(loc.token);

fdt_descend_into(fdt_loc_t loc)
    new_loc = fdt_next_token(loc, &depth);

    return depth != 1 ? loc : new_loc;
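
/*
 * Search the immediate properties of the node at `loc` by name; on a
 * match, `val` is pointed straight into the blob (no copy is made).
 */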
fdt_find_prop(const struct fdt_blob* fdt, fdt_loc_t loc,
              const char* name, struct dtp_val* val)
    loc = fdt_descend_into(loc);
        if (!fdt_prop(loc.token)) {
        prop_name = fdt_prop_key(fdt, loc);

        if (!streq(prop_name, name)) {
        val->encoded = (dt_enc_t)__prop_val_ptr(loc.prop);
        val->size = loc.prop->len;
    } while (fdt_next_sibling(loc, &loc));
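
/*
 * Memory scanner: enumerates the physical memory layout described by the
 * devicetree, both usable ranges (/memory nodes) and reservations
 * (/reserved-memory children), as dt_memory_node records. A minimal usage
 * sketch, assuming the next* helpers return a boolean-style "produced
 * another node/range" flag as their call pattern suggests (dtb_paddr is a
 * placeholder):
 *
 *     struct fdt_blob fdt;
 *     struct fdt_memscan mscan;
 *     struct dt_memory_node mem;
 *
 *     fdt_load(&fdt, dtb_paddr);
 *     fdt_memscan_begin(&mscan, &fdt);
 *     while (fdt_memscan_nextnode(&mscan, &fdt)) {
 *         while (fdt_memscan_nextrange(&mscan, &mem)) {
 *             // consume mem.base / mem.size / mem.type
 *         }
 *     }
 */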
fdt_memscan_begin(struct fdt_memscan* mscan, const struct fdt_blob* fdt)
    loc = fdt_descend_into(loc);

    if (fdt_find_prop(fdt, loc, "#address-cells", &val))
        mscan->root_addr_c = val.ref->u32_val;

    if (fdt_find_prop(fdt, loc, "#size-cells", &val))
        mscan->root_size_c = val.ref->u32_val;

    mscan->node_type = FDT_MEM_FREE;
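
/* Root cell counts select field width: one cell reads as a u32, otherwise as a u64. */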
#define get_size(mscan, val) \
    (mscan->root_size_c == 1 ? (val)->ref->u32_val : (val)->ref->u64_val)

#define get_addr(mscan, val) \
    (mscan->root_addr_c == 1 ? (val)->ref->u32_val : (val)->ref->u64_val)

fdt_memscan_nextnode(struct fdt_memscan* mscan, struct fdt_blob* fdt)
    struct dtp_val val, reg_val;
    bool has_reg = false, found = false;

        if (!fdt_node(loc.token))
        if (mscan->node_type != FDT_MEM_FREE) {
        if (streq(loc.node->name, "reserved-memory")) {
            // dive into /reserved-memory and walk its children
            mscan->node_type = FDT_MEM_RSVD;
            loc = fdt_descend_into(loc);
        if (!fdt_find_prop(fdt, loc, "device_type", &val))
        if (!streq(val.str_val, "memory"))
    } while (fdt_next_sibling(loc, &next) && !found);

    if (found) goto _found;

    // emerged from /reserved-memory, resume scanning for /memory nodes
    if (mscan->node_type != FDT_MEM_FREE) {
        mscan->node_type = FDT_MEM_FREE;

    dtpi_init_empty(&mscan->regit);

    has_reg = fdt_find_prop(fdt, loc, "reg", &val);
    if (mscan->node_type == FDT_MEM_RSVD) {
        WARN("malformed memory node");
    dtpi_init(&mscan->regit, &val);

    mscan->node_attr.nomap = fdt_find_prop(fdt, loc, "no-map", NULL);
    mscan->node_attr.reusable = fdt_find_prop(fdt, loc, "reusable", NULL);

    dtpi_init(&mscan->regit, &val);
    mscan->node_type = FDT_MEM_RSVD;

    if (!fdt_find_prop(fdt, loc, "size", &val))
        WARN("malformed reserved memory child node");

    mscan->node_type = FDT_MEM_RSVD_DYNAMIC;
    mscan->node_attr.total_size = get_size(mscan, &val);

    if (fdt_find_prop(fdt, loc, "alignment", &val)) {
        mscan->node_attr.alignment = get_size(mscan, &val);

    if (fdt_find_prop(fdt, loc, "alloc-ranges", &val)) {
        dtpi_init(&mscan->regit, &val);

fdt_memscan_nextrange(struct fdt_memscan* mscan, struct dt_memory_node* mem)
    if (dtpi_is_empty(&mscan->regit)) {

    if (!dtpi_has_next(&mscan->regit)) {

    if (dtpi_next_val(&mscan->regit, &val, mscan->root_addr_c)) {
        mem->base = get_addr(mscan, &val);

    if (dtpi_next_val(&mscan->regit, &val, mscan->root_size_c)) {
        mem->size = get_size(mscan, &val);

    mem->type = mscan->node_type;

    if (mem->type == FDT_MEM_RSVD_DYNAMIC) {
        mem->dyn_alloc_attr = mscan->node_attr;
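
/*
 * Property parsers: each handles one family of standard properties and
 * reports whether it consumed the property (return convention assumed
 * from the call sites in __fill_node), so __fill_node can try them in
 * order and fall back to the generic property table for anything else.
 */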
__parse_stdbase_prop(struct fdt_blob* fdt, fdt_loc_t loc,
                     struct dtn_base* node)
    if (propeq(fdt, loc, "compatible")) {
        __mkprop_ptr(loc, &node->compat);

    else if (propeq(fdt, loc, "phandle")) {
        node->phandle = __prop_getu32(loc);

    else if (propeq(fdt, loc, "#address-cells")) {
        node->addr_c = (char)__prop_getu32(loc);

    else if (propeq(fdt, loc, "#size-cells")) {
        node->sz_c = (char)__prop_getu32(loc);

    else if (propeq(fdt, loc, "#interrupt-cells")) {
        node->intr_c = (char)__prop_getu32(loc);

    else if (propeq(fdt, loc, "status")) {
        char peek = loc.prop->val_str[0];
            node->status = STATUS_OK;
        else if (peek == 'r') {
            node->status = STATUS_RSVD;
        else if (peek == 'd') {
            node->status = STATUS_DISABLE;
        else if (peek == 'f') {
            node->status = STATUS_FAIL;

__parse_stdnode_prop(struct fdt_blob* fdt, fdt_loc_t loc, struct dtn* node)
    if (propeq(fdt, loc, "reg")) {
        __mkprop_ptr(loc, &node->reg);

    else if (propeq(fdt, loc, "ranges")) {
        __mkprop_ptr(loc, &node->ranges);

    else if (propeq(fdt, loc, "dma-ranges")) {
        __mkprop_ptr(loc, &node->dma_ranges);

__parse_stdflags(struct fdt_blob* fdt, fdt_loc_t loc, struct dtn_base* node)
    if (propeq(fdt, loc, "dma-coherent")) {
        node->dma_coherent = true;

    else if (propeq(fdt, loc, "dma-noncoherent")) {
        node->dma_ncoherent = true;

    else if (propeq(fdt, loc, "interrupt-controller")) {
        node->intr_controll = true;

__dt_node_set_name(struct dtn* node, const char* name)
    changeling_setname(&node->mobj, name);

__init_prop_table(struct dtn_base* node)
    struct dtp_table* propt;

    propt = valloc(sizeof(*propt));
    hashtable_init(propt->_op_bucket);

#define prop_table_add(node, prop) \
    hashtable_hash_in( (node)->props->_op_bucket, \
                       &(prop)->ht, (prop)->key.hash);

__parse_other_prop(struct fdt_blob* fdt, fdt_loc_t loc, struct dtn_base* node)
    prop = valloc(sizeof(*prop));
    key = fdt_prop_key(fdt, loc);

    prop->key = HSTR(key, strlen(key));
    __mkprop_ptr(loc, &prop->val);

    hstr_rehash(&prop->key, HSTR_FULL_HASH);

    prop_table_add(node, prop);

__fill_node(struct fdt_blob* fdt, fdt_loc_t loc, struct dtn* node)
    if (__parse_stdflags(fdt, loc, &node->base)) {

    if (__parse_stdbase_prop(fdt, loc, &node->base)) {

    if (__parse_stdnode_prop(fdt, loc, node)) {

    if (parse_stdintr_prop(fdt, loc, &node->intr)) {

    __parse_other_prop(fdt, loc, &node->base);

__set_parent(struct dtn_base* parent, struct dtn_base* node)
    parent_obj = devtree_obj_root;
    node->parent = parent;

        node->addr_c = parent->addr_c;
        node->sz_c = parent->sz_c;
        node->intr_c = parent->intr_c;
        parent_obj = dt_mobj(parent);

    changeling_attach(parent_obj, dt_mobj(node));

__init_node_regular(struct dtn* node)
    __init_prop_table(&node->base);
    changeling_morph_anon(NULL, node->mobj, dt_morpher);

    node->intr.parent_hnd = PHND_NULL;

__expand_extended_intr(struct dtn_intr* intrupt)
    struct dtspec_intr* ispec;

    if (!intrupt->extended) {
        nr_intrs = intrupt->raw_ispecs.size / sizeof(u32_t);
        nr_intrs /= intrupt->parent->base.intr_c;

    arr = intrupt->raw_ispecs;

    llist_init_head(&intrupt->ext_ispecs);

    dtpi_init(&it, &arr);

    while (dtpi_has_next(&it))
        domain = dtpi_next_hnd(&it);
            WARN("(intr_extended) malformed phandle");

        ispec = valloc(sizeof(*ispec));

        ispec->domain = domain;
        dtpi_next_val(&it, &ispec->val, domain->base.intr_c);

        llist_append(&intrupt->ext_ispecs, &ispec->ispecs);

    intrupt->nr_intrs = nr_intrs;
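
/*
 * Second pass over the loaded tree: resolve each node's interrupt parent
 * phandle, falling back to its tree parent when the reference is absent
 * or malformed, then expand any extended interrupt specifiers.
 */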
__resolve_phnd_references()
    struct dtn_base *pos, *n;
    struct dtn *node, *parent, *default_parent;
    struct dtn_intr* intrupt;

    llist_for_each(pos, n, &dtctx.nodes, nodes)
        node = dtn_from(pos);
        intrupt = &node->intr;

        if (intrupt->parent_hnd == PHND_NULL) {

        phnd = intrupt->parent_hnd;
        default_parent = (struct dtn*)node->base.parent;
        parent = default_parent;

        if (phnd != PHND_NULL) {
            parent = dt_resolve_phandle(phnd);
                WARN("dtb: (phnd_resolve) malformed phandle: %d", phnd);
                parent = default_parent;

        intrupt->parent = parent;

        __expand_extended_intr(intrupt);

__resolve_inter_map()
    struct dtn_base *pos, *n;

    llist_for_each(pos, n, &dtctx.nodes, nodes)
        dt_resolve_interrupt_map(dtn_from(pos));
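
/*
 * Entry point: parse the dtb handed over by the bootloader. One walk over
 * the structure block builds a dtn per node, tracking ancestry with a
 * fixed 16-deep stack; phandle references and interrupt maps are then
 * resolved in a second pass. A minimal usage sketch (dtb_paddr is a
 * placeholder for wherever the boot protocol dropped the blob):
 *
 *     dt_load(dtb_paddr);
 */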
dt_load(ptr_t dtb_dropoff)
    llist_init_head(&dtctx.nodes);
    hashtable_init(dtctx.phnds_table);

    struct fdt_blob *fdt;
    *stack[16] = { NULL };

    int depth = 0, delta = 0, nr_nodes = 0;
    fdt_loc_t loc, next_loc;

    fdt_load(&dtctx.fdt, dtb_dropoff);

    while (!fdt_eof(loc.token))
        next_loc = fdt_next_token(loc, &delta);
            ERROR("malformed dtb: node nesting too deep.");

        if (fdt_node(loc.token))
            node = vzalloc(sizeof(struct dtn));
            __init_node_regular(node);
            llist_append(&dtctx.nodes, &node->base.nodes);

            __dt_node_set_name(node, loc.node->name);

            __set_parent(&stack[depth - 1]->base, &node->base);

        else if (depth > 1 && fdt_node_end(loc.token))
            stack[depth - 1] = NULL;

        else if (fdt_prop(loc.token))
            node = stack[depth - 1];

            assert(depth && node);
            __fill_node(fdt, loc, node);

    dtctx.root = stack[0];

    __resolve_phnd_references();
    __resolve_inter_map();

    INFO("%d nodes loaded.", nr_nodes);
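
/* Linear scan over all loaded nodes for a matching phandle. */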
dt_resolve_phandle(dt_phnd_t phandle)
    struct dtn_base *pos, *n;
    llist_for_each(pos, n, &dtctx.nodes, nodes)
        if (pos->phandle == phandle) {
            return (struct dtn*)pos;

__byname_predicate(struct dtn_iter* iter, struct dtn_base* node)
    const char* be_matched = HSTR_VAL(node->mobj.name);
    const char* name = (const char*)iter->closure;

    while (be_matched[i] && name[i])
        if (be_matched[i] != name[i]) {
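
/*
 * Node finder API. A minimal usage sketch, assuming dt_find_next reports
 * via its return value whether another match was produced (the search
 * covers the direct children of the given node; passing NULL starts from
 * the devicetree root):
 *
 *     struct dtn_iter it;
 *     struct dtn_base* match;
 *
 *     dt_begin_find_byname(&it, NULL, "memory");
 *     while (dt_find_next(&it, &match)) {
 *         // use match
 *     }
 */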
dt_begin_find_byname(struct dtn_iter* iter,
                     struct dtn* node, const char* name)
    dt_begin_find(iter, node, __byname_predicate, name);

dt_begin_find(struct dtn_iter* iter, struct dtn* node,
              node_predicate_t pred, void* closure)
    node = node ? : (struct dtn*)dtctx.root;

    iter->head = &node->base;
    iter->matched = NULL;
    iter->closure = closure;

    struct dtn_base* base;
    changeling_for_each(pos, n, &node->mobj)
        base = &changeling_reveal(pos, dt_morpher)->base;
        if (pred(iter, base)) {
            iter->matched = base;

dt_find_next(struct dtn_iter* iter,
             struct dtn_base** matched)
    if (!dt_found_any(iter)) {

    head = dt_mobj(iter->head);
    pos = dt_mobj(iter->matched);
    *matched = iter->matched;

    while (&pos->sibs != &head->subs)
        pos = list_next(pos, morph_t, sibs);
        node = changeling_reveal(pos, dt_morpher);

        if (!iter->pred(iter, &node->base)) {

        iter->matched = &node->base;
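
/*
 * Look up a property on a loaded node: hash the requested name, probe the
 * node's property table, and compare full strings on hash hits.
 */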
dt_getprop(struct dtn_base* base, const char* name)
    struct hstr hashed_name;

    hashed_name = HSTR(name, strlen(name));
    hstr_rehash(&hashed_name, HSTR_FULL_HASH);
    hash = hashed_name.hash;

    hashtable_hash_foreach(base->props->_op_bucket, hash, pos, n, ht)
        if (HSTR_EQ(&pos->key, &hashed_name)) {
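
/*
 * dtpropx: typed accessors over table-like property values. A proplet is
 * an array of dtprop_def column descriptors, at most ten entries and
 * terminated by a zero type; compiling it precomputes each column's
 * accumulated cell offset within a row. The loop below checks the bound
 * before dereferencing, so a proplet missing its terminator cannot be
 * read past the tenth slot.
 */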
dtpx_compile_proplet(struct dtprop_def* proplet)
    unsigned int acc = 0;

    for (i = 0; i < 10 && proplet[i].type; ++i)
        proplet[i].acc_sz = acc;
        acc += proplet[i].cell;

    if (proplet[i - 1].type && i == 10) {
        FATAL("invalid proplet: no terminator detected");

    proplet[i].acc_sz = acc;
dtpx_prepare_with(struct dtpropx* propx, struct dtp_val* prop,
                  struct dtprop_def* proplet)
    bool has_str = false;

    for (i = 0; proplet[i].type; ++i);

    propx->proplet = proplet;
    propx->proplet_len = i;
    propx->proplet_sz = proplet[i].acc_sz;

dtpx_goto_row(struct dtpropx* propx, int row)
    loc = propx->proplet_sz;

    if (loc * sizeof(u32_t) >= propx->raw->size) {

    propx->row_loc = loc;

dtpx_next_row(struct dtpropx* propx)
    loc = propx->row_loc;
    loc += propx->proplet_sz;

    if (loc * sizeof(u32_t) >= propx->raw->size) {

    propx->row_loc = loc;
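
/*
 * Extract a single column from the current row. The column's archetype
 * decides how the raw cells are read: as a u32, a u64, a phandle
 * (resolved to the node it references), or kept as an opaque composite
 * pointer into the property value.
 */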
dtpx_extract_at(struct dtpropx* propx,
                struct dtprop_xval* val, int col)
    struct dtprop_def* def;
    union dtp_baseval* raw;

    if (unlikely(col >= propx->proplet_len)) {

    def = &propx->proplet[col];
    enc = &propx->raw->encoded[propx->row_loc + def->acc_sz];
    raw = (union dtp_baseval*)enc;

    val->archetype = def;

            val->u32 = raw->u32_val;

            val->u64 = raw->u64_val;

            ptr_t hnd = raw->phandle;
            val->phandle = dt_resolve_phandle(hnd);

            val->composite = enc;

dtpx_extract_loc(struct dtpropx* propx,
                 struct dtprop_xval* val, int row, int col)
    ptr_t loc = propx->row_loc;

    if (!dtpx_goto_row(propx, row))

    bool r = dtpx_extract_at(propx, val, col);
    propx->row_loc = loc;

dtpx_extract_row(struct dtpropx* propx, struct dtprop_xval* vals, int len)
    assert(len == propx->proplet_len);

    for (int i = 0; i < len; i++)
        if (!dtpx_extract_at(propx, &vals[i], i)) {

    devtree_obj_root = changeling_spawn(NULL, NULL);

owloysius_fetch_init(__init_devtree, on_sysconf);