commit 46d9ee7795
Date: 2024-08-05 20:57:09 +08:00
3020 changed files with 1725767 additions and 0 deletions


@@ -0,0 +1,24 @@
menuconfig RT_USING_ADT
    bool "Enable ADT (abstract data type)"
    default y if ARCH_MM_MMU
    default n

config RT_USING_ADT_AVL
    bool "AVL tree"
    depends on RT_USING_ADT
    default y

config RT_USING_ADT_BITMAP
    bool "Bitmap"
    depends on RT_USING_ADT
    default y

config RT_USING_ADT_HASHMAP
    bool "HashMap"
    depends on RT_USING_ADT
    default y

config RT_USING_ADT_REF
    bool "Reference API"
    depends on RT_USING_ADT
    default y
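For reference, each bool option that ends up enabled in menuconfig becomes a plain macro switch in the generated rtconfig.h, which is what the SConscript dependencies and the sources below test. An illustrative excerpt, assuming everything is left at its default on an MMU target:

/* illustrative rtconfig.h excerpt (generated by menuconfig, not part of this commit) */
#define RT_USING_ADT
#define RT_USING_ADT_AVL
#define RT_USING_ADT_BITMAP
#define RT_USING_ADT_HASHMAP
#define RT_USING_ADT_REF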


@@ -0,0 +1,15 @@
import os
from building import *

cwd = GetCurrentDir()
list = os.listdir(cwd)
objs = []

if not GetDepend(['RT_USING_ADT']):
    Return('objs')

for d in list:
    path = os.path.join(cwd, d)
    if os.path.isfile(os.path.join(path, 'SConscript')):
        objs = objs + SConscript(os.path.join(d, 'SConscript'))

Return('objs')


@@ -0,0 +1,9 @@
from building import *
cwd = GetCurrentDir()
src = Glob('*.c')
CPPPATH = [cwd]
group = []
group = DefineGroup('LIBADT', src, depend = ['RT_USING_ADT_AVL'], CPPPATH = CPPPATH)
Return('group')


@@ -0,0 +1,242 @@
/*
* Copyright (c) 2006-2022, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2019-10-12 Jesven first version
* 2022-11-14 WangXiaoyao Optimize footprint and performance
* Export as ADT for generic use case
*/
#include <stddef.h>
#include <stdint.h>
#include "avl.h"
#define HEIGHT_OF(node) ((node) ? (node)->height : 0)
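/* IS_LCHILD/IS_RCHILD test whether a node is its parent's left/right child by
 * pointer identity (the subtraction is zero only on an exact match);
 * NODE_PLACE yields the address of the parent slot that points at the node. */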
#define IS_RCHILD(node) (!((node) - ((node)->parent->avl_right)))
#define IS_LCHILD(node) (!((node) - ((node)->parent->avl_left)))
#define NODE_PLACE(node) \
IS_LCHILD(node) ? &(node)->parent->avl_left : &(node)->parent->avl_right
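/* Single right rotation for the left-left case: lchild is promoted to the
 * subtree root and axis becomes its right child. */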
static inline void rotate_right(struct util_avl_struct *axis,
struct util_avl_struct *lchild,
struct util_avl_struct *lrchild,
struct util_avl_struct **nodeplace,
size_t lrheight)
{
axis->avl_left = lrchild;
lchild->avl_right = axis;
axis->height = lrheight + 1;
lchild->height = axis->height + 1;
lchild->parent = axis->parent;
axis->parent = lchild;
*nodeplace = lchild;
if (lrchild != NULL)
lrchild->parent = axis;
}
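/* Double (left-right) rotation: lrchild is promoted to the subtree root,
 * with lchild as its left child and axis as its right child. */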
static inline void midmount_right(struct util_avl_struct *axis,
struct util_avl_struct *lchild,
struct util_avl_struct *lrchild,
struct util_avl_struct **nodeplace,
size_t lrheight)
{
lchild->avl_right = lrchild->avl_left;
axis->avl_left = lrchild->avl_right;
lrchild->avl_left = lchild;
lrchild->avl_right = axis;
lrchild->height = lchild->height;
lchild->height = lrheight;
axis->height = lrheight;
lrchild->parent = axis->parent;
lchild->parent = lrchild;
axis->parent = lrchild;
if (lchild->avl_right != NULL)
lchild->avl_right->parent = lchild;
if (axis->avl_left != NULL)
axis->avl_left->parent = axis;
*nodeplace = lrchild;
}
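/* Single left rotation for the right-right case: rchild is promoted to the
 * subtree root and axis becomes its left child. */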
static inline void rotate_left(struct util_avl_struct *axis,
struct util_avl_struct *rchild,
struct util_avl_struct *rlchild,
struct util_avl_struct **nodeplace,
size_t rlheight)
{
axis->avl_right = rlchild;
rchild->avl_left = axis;
axis->height = rlheight + 1;
rchild->height = axis->height + 1;
rchild->parent = axis->parent;
axis->parent = rchild;
*nodeplace = rchild;
if (rlchild != NULL)
rlchild->parent = axis;
}
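/* Double (right-left) rotation: rlchild is promoted to the subtree root,
 * with axis as its left child and rchild as its right child. */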
static inline void midmount_left(struct util_avl_struct *axis,
struct util_avl_struct *rchild,
struct util_avl_struct *rlchild,
struct util_avl_struct **nodeplace,
size_t rlheight)
{
rchild->avl_left = rlchild->avl_right;
axis->avl_right = rlchild->avl_left;
rlchild->avl_right = rchild;
rlchild->avl_left = axis;
rlchild->height = rchild->height;
rchild->height = rlheight;
axis->height = rlheight;
rlchild->parent = axis->parent;
rchild->parent = rlchild;
axis->parent = rlchild;
if (rchild->avl_left != NULL)
rchild->avl_left->parent = rchild;
if (axis->avl_right != NULL)
axis->avl_right->parent = axis;
*nodeplace = rlchild;
}
/**
 * @brief AVL insertion and deletion conceptually consist of two stages:
 * 1. insertion/deletion of the node reference
 * 2. rebalancing back up toward the root
 */
void util_avl_rebalance(struct util_avl_struct *node,
struct util_avl_root *root)
{
if (!node)
return;
struct util_avl_struct *axis = node;
struct util_avl_struct **nodeplace;
do
{
struct util_avl_struct *lchild = axis->avl_left;
struct util_avl_struct *rchild = axis->avl_right;
nodeplace = axis->parent ? NODE_PLACE(axis) : &root->root_node;
int lheight = HEIGHT_OF(lchild);
int rheight = HEIGHT_OF(rchild);
if (rheight + 1 < lheight)
{
struct util_avl_struct *lrchild = lchild->avl_right;
size_t lrheight = HEIGHT_OF(lrchild);
if (HEIGHT_OF(lchild->avl_left) >= lrheight)
{
rotate_right(axis, lchild, lrchild, nodeplace, lrheight);
axis = lchild->parent;
}
else
{
midmount_right(axis, lchild, lrchild, nodeplace, lrheight);
axis = lrchild->parent;
}
}
else if (lheight + 1 < rheight)
{
struct util_avl_struct *rlchild = rchild->avl_left;
size_t rlheight = HEIGHT_OF(rlchild);
if (HEIGHT_OF(rchild->avl_right) >= rlheight)
{
rotate_left(axis, rchild, rlchild, nodeplace, rlheight);
axis = rchild->parent;
}
else
{
midmount_left(axis, rchild, rlchild, nodeplace, rlheight);
axis = rlchild->parent;
}
}
else
{
int height = (lheight < rheight ? rheight : lheight) + 1;
if (height == axis->height)
break;
axis->height = height;
axis = axis->parent;
}
} while (axis);
}
void util_avl_remove(struct util_avl_struct *node, struct util_avl_root *root)
{
struct util_avl_struct **nodeplace;
if (root->root_node == NULL)
return;
if (node->parent != NULL)
{
nodeplace = NODE_PLACE(node);
}
else
{
nodeplace = &root->root_node;
}
/* deletion */
if (node->avl_right == NULL)
{
*nodeplace = node->avl_left;
if (node->avl_left != NULL)
node->avl_left->parent = node->parent;
node = node->parent;
}
else
{
struct util_avl_struct *rchild = node->avl_right;
if (rchild->avl_left == NULL)
{
*nodeplace = rchild;
rchild->avl_left = node->avl_left;
if (rchild->avl_left != NULL)
rchild->avl_left->parent = rchild;
rchild->parent = node->parent;
util_avl_rebalance(rchild, root);
node = rchild->parent;
}
else
{
struct util_avl_struct *successor = rchild->avl_left;
struct util_avl_struct *sparent = rchild;
while (successor->avl_left != NULL)
{
sparent = successor;
successor = successor->avl_left;
}
*nodeplace = successor;
sparent->avl_left = successor->avl_right;
successor->avl_left = node->avl_left;
successor->avl_right = node->avl_right;
if (successor->avl_left != NULL)
successor->avl_left->parent = successor;
successor->avl_right->parent = successor;
if (sparent->avl_left != NULL)
sparent->avl_left->parent = sparent;
successor->parent = node->parent;
util_avl_rebalance(sparent, root);
node = successor;
}
}
/* rebalance */
util_avl_rebalance(node, root);
return;
}
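The implementation above deliberately provides only rebalancing and removal; insertion is left to the caller, which descends the tree to the empty slot, links the new node there with util_avl_link() from avl.h, and then rebalances. A minimal sketch of that two-stage insert, assuming a hypothetical my_node wrapper keyed by an integer (duplicate keys simply go right):

#include <rtthread.h>
#include "avl.h"

struct my_node
{
    struct util_avl_struct entry;
    int key;
};

/* Two-stage insert: (1) link the node at the found slot, (2) rebalance. */
static void my_tree_insert(struct util_avl_root *root, struct my_node *new_node)
{
    struct util_avl_struct **link = &root->root_node;
    struct util_avl_struct *parent = RT_NULL;

    /* stage 1: plain BST descent to the empty slot */
    while (*link)
    {
        struct my_node *curr = rt_container_of(*link, struct my_node, entry);

        parent = *link;
        link = (new_node->key < curr->key) ? &(*link)->avl_left
                                           : &(*link)->avl_right;
    }
    util_avl_link(&new_node->entry, parent, link);

    /* stage 2: walk back toward the root, updating heights and rotating */
    util_avl_rebalance(parent, root);
}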


@@ -0,0 +1,116 @@
/*
* Copyright (c) 2006-2020, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2019-10-12 Jesven first version
* 2022-11-14 WangXiaoyao Optimize for generic use case
* and performance
*/
#ifndef __UTIL_TREE_AVL_H__
#define __UTIL_TREE_AVL_H__
#include <rtdef.h>
#include <stdint.h>
struct util_avl_struct
{
struct util_avl_struct *avl_left;
struct util_avl_struct *avl_right;
struct util_avl_struct *parent;
size_t height;
};
#define AVL_ROOT ((struct util_avl_struct *)0)
struct util_avl_root
{
struct util_avl_struct *root_node;
};
void util_avl_rebalance(struct util_avl_struct *node,
struct util_avl_root *root);
void util_avl_remove(struct util_avl_struct *node, struct util_avl_root *root);
static inline void util_avl_link(struct util_avl_struct *new_node,
struct util_avl_struct *parent,
struct util_avl_struct **nodeplace)
{
new_node->avl_left = AVL_ROOT;
new_node->avl_right = AVL_ROOT;
new_node->parent = parent;
new_node->height = 1;
*nodeplace = new_node;
}
static inline struct util_avl_struct *util_avl_next(
struct util_avl_struct *node)
{
struct util_avl_struct *successor = 0;
if (node)
{
if (node->avl_right)
{
node = node->avl_right;
while (node->avl_left)
node = node->avl_left;
successor = node;
}
else
{
while ((successor = node->parent) && (node == successor->avl_right))
node = successor;
}
}
return successor;
}
static inline struct util_avl_struct *util_avl_prev(
struct util_avl_struct *node)
{
struct util_avl_struct *predecessor = 0;
if (node)
{
if (node->avl_left)
{
node = node->avl_left;
while (node->avl_right)
node = node->avl_right;
predecessor = node;
}
else
{
while ((predecessor = node->parent) &&
(node == predecessor->avl_left))
node = predecessor;
}
}
return predecessor;
}
static inline struct util_avl_struct *util_avl_first(struct util_avl_root *root)
{
struct util_avl_struct *first = root->root_node;
if (first)
{
while (first->avl_left)
first = first->avl_left;
}
return first;
}
static inline struct util_avl_struct *util_avl_last(struct util_avl_root *root)
{
struct util_avl_struct *last = root->root_node;
if (last)
{
while (last->avl_right)
last = last->avl_right;
}
return last;
}
#endif /* __UTIL_TREE_AVL_H__ */
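The header also provides ordered traversal helpers. A short sketch of walking a tree in ascending key order, reusing the hypothetical my_node wrapper from the insertion sketch above:

static void my_tree_dump(struct util_avl_root *root)
{
    struct util_avl_struct *it;

    /* util_avl_first() returns the leftmost (smallest) node and
     * util_avl_next() its in-order successor, so this visits keys in
     * ascending order. */
    for (it = util_avl_first(root); it; it = util_avl_next(it))
    {
        struct my_node *node = rt_container_of(it, struct my_node, entry);

        rt_kprintf("key = %d\n", node->key);
    }
}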


@@ -0,0 +1,9 @@
from building import *
cwd = GetCurrentDir()
src = list()
CPPPATH = [cwd]
group = []
group = DefineGroup('LIBADT', src, depend = ['RT_USING_ADT_BITMAP'], CPPPATH = CPPPATH)
Return('group')


@@ -0,0 +1,77 @@
/*
* Copyright (c) 2006-2022, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2022-6-27 GuEe-GUI first version
*/
#ifndef __UTIL_BITMAP_H__
#define __UTIL_BITMAP_H__
#include <rtdef.h>
typedef rt_ubase_t rt_bitmap_t;
#define RT_BITMAP_BITS_MIN (sizeof(rt_bitmap_t) * 8)
#define RT_BITMAP_LEN(bits) (((bits) + (RT_BITMAP_BITS_MIN) - 1) / (RT_BITMAP_BITS_MIN))
#define RT_BITMAP_BIT_LEN(nr) ((nr) * RT_BITMAP_BITS_MIN)
#define RT_BITMAP_DECLARE(name, bits) rt_bitmap_t name[RT_BITMAP_LEN(bits)]
rt_inline void rt_bitmap_set_bit(rt_bitmap_t *bitmap, rt_uint32_t bit)
{
bitmap[bit / RT_BITMAP_BITS_MIN] |= (1UL << (bit & (RT_BITMAP_BITS_MIN - 1)));
}
rt_inline rt_bool_t rt_bitmap_test_bit(rt_bitmap_t *bitmap, rt_uint32_t bit)
{
return !!(bitmap[bit / RT_BITMAP_BITS_MIN] & (1UL << (bit & (RT_BITMAP_BITS_MIN - 1))));
}
rt_inline void rt_bitmap_clear_bit(rt_bitmap_t *bitmap, rt_uint32_t bit)
{
bitmap[bit / RT_BITMAP_BITS_MIN] &= ~(1UL << (bit & (RT_BITMAP_BITS_MIN - 1)));
}
rt_inline rt_size_t rt_bitmap_next_set_bit(rt_bitmap_t *bitmap, rt_size_t start, rt_size_t limit)
{
rt_size_t bit;
for (bit = start; bit < limit && !rt_bitmap_test_bit(bitmap, bit); ++bit)
{
}
return bit;
}
rt_inline rt_size_t rt_bitmap_next_clear_bit(rt_bitmap_t *bitmap, rt_size_t start, rt_size_t limit)
{
rt_size_t bit;
for (bit = start; bit < limit && rt_bitmap_test_bit(bitmap, bit); ++bit)
{
}
return bit;
}
#define rt_bitmap_for_each_bit_from(state, bitmap, from, bit, limit) \
for ((bit) = rt_bitmap_next_##state##_bit((bitmap), (from), (limit)); \
(bit) < (limit); \
(bit) = rt_bitmap_next_##state##_bit((bitmap), (bit + 1), (limit)))
#define rt_bitmap_for_each_set_bit_from(bitmap, from, bit, limit) \
rt_bitmap_for_each_bit_from(set, bitmap, from, bit, limit)
#define rt_bitmap_for_each_set_bit(bitmap, bit, limit) \
rt_bitmap_for_each_set_bit_from(bitmap, 0, bit, limit)
#define rt_bitmap_for_each_clear_bit_from(bitmap, from, bit, limit) \
rt_bitmap_for_each_bit_from(clear, bitmap, from, bit, limit)
#define rt_bitmap_for_each_clear_bit(bitmap, bit, limit) \
rt_bitmap_for_each_clear_bit_from(bitmap, 0, bit, limit)
#endif /* __UTIL_BITMAP_H__ */
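A short usage sketch, assuming a hypothetical 64-slot ID allocator built on these helpers (the header name is taken from its include guard; the exact path is assumed):

#include <rtthread.h>
#include "bitmap.h"   /* the header above; exact path assumed */

#define MY_ID_COUNT 64

static RT_BITMAP_DECLARE(my_id_map, MY_ID_COUNT);

/* Allocate the lowest free ID, or return -1 if all are taken. */
static int my_id_alloc(void)
{
    rt_size_t id = rt_bitmap_next_clear_bit(my_id_map, 0, MY_ID_COUNT);

    if (id >= MY_ID_COUNT)
        return -1;

    rt_bitmap_set_bit(my_id_map, id);
    return (int)id;
}

static void my_id_dump(void)
{
    rt_size_t id;

    /* Print every ID currently in use. */
    rt_bitmap_for_each_set_bit(my_id_map, id, MY_ID_COUNT)
    {
        rt_kprintf("id %d in use\n", (int)id);
    }
}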


@@ -0,0 +1,9 @@
from building import *
cwd = GetCurrentDir()
src = list()
CPPPATH = [cwd]
group = []
group = DefineGroup('LIBADT', src, depend = ['RT_USING_ADT_HASHMAP'], CPPPATH = CPPPATH)
Return('group')


@@ -0,0 +1,41 @@
/*
* Copyright (c) 2006-2022, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2022-08-01 GuEe-GUI first version
*/
#ifndef __UTIL_HASHMAP_H__
#define __UTIL_HASHMAP_H__
#include <rtdef.h>
/*
* http://www.citi.umich.edu/techreports/reports/citi-tr-00-1.pdf
*
* GoldenRatio = ~(Math.pow(2, 32) / ((Math.sqrt(5) - 1) / 2)) + 1
*/
#define RT_HASHMAP_GOLDEN_RATIO_32 0x61C88647
#define RT_HASHMAP_GOLDEN_RATIO_64 0x61C8864680B583EBULL
rt_inline rt_uint32_t rt_hashmap_32(rt_uint32_t val, rt_uint32_t bits)
{
/* High bits are more random, so use them. */
return (val * RT_HASHMAP_GOLDEN_RATIO_32) >> (32 - bits);
}
rt_inline rt_uint32_t rt_hashmap_64(rt_uint64_t val, rt_uint32_t bits)
{
#ifdef ARCH_CPU_64BIT
/* 64x64-bit multiply is efficient on all 64-bit processors */
return val * RT_HASHMAP_GOLDEN_RATIO_64 >> (64 - bits);
#else
/* Hash 64 bits using only 32x32-bit multiply. */
return rt_hashmap_32((rt_uint32_t)val ^ ((val >> 32) * RT_HASHMAP_GOLDEN_RATIO_32), bits);
#endif
}
#endif /* __UTIL_HASHMAP_H__ */
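These multiplicative hash helpers only map a key to a bucket index; the table itself is left to the caller. A minimal sketch, assuming a hypothetical 64-bucket table of singly linked lists (header name taken from its include guard, path assumed):

#include <rtthread.h>
#include "hashmap.h"   /* the header above; exact path assumed */

#define MY_TABLE_BITS 6                     /* 2^6 = 64 buckets */
#define MY_TABLE_SIZE (1U << MY_TABLE_BITS)

struct my_item
{
    rt_uint32_t key;
    struct my_item *next;
    /* payload ... */
};

static struct my_item *my_table[MY_TABLE_SIZE];

/* Insert at the head of the bucket selected by the 32-bit hash. */
static void my_table_insert(struct my_item *item)
{
    rt_uint32_t idx = rt_hashmap_32(item->key, MY_TABLE_BITS);

    item->next = my_table[idx];
    my_table[idx] = item;
}

static struct my_item *my_table_find(rt_uint32_t key)
{
    struct my_item *it = my_table[rt_hashmap_32(key, MY_TABLE_BITS)];

    while (it && it->key != key)
        it = it->next;

    return it;
}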


@@ -0,0 +1,9 @@
from building import *
cwd = GetCurrentDir()
src = list()
CPPPATH = [cwd]
group = []
group = DefineGroup('LIBADT', src, depend = ['RT_USING_ADT_REF'], CPPPATH = CPPPATH)
Return('group')


@@ -0,0 +1,75 @@
/*
* Copyright (c) 2006-2023, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2022-3-1 zhouxiaohu first version
* 2023-5-18 GuEe-GUI implemented by rtatomic
*/
#ifndef __UTIL_REF_H__
#define __UTIL_REF_H__
#include <rtatomic.h>
/**
 * struct rt_ref must be embedded in an object;
 * it acts as the reference counter of that object.
 */
struct rt_ref
{
rt_atomic_t refcount;
};
#define RT_REF_INIT(n) { .refcount = n, }
rt_inline void rt_ref_init(struct rt_ref *r)
{
rt_atomic_store(&r->refcount, 1);
}
rt_inline unsigned int rt_ref_read(struct rt_ref *r)
{
return rt_atomic_load(&r->refcount);
}
/**
 * rt_ref_get
 * Increment the reference counter of the object.
 */
rt_inline void rt_ref_get(struct rt_ref *r)
{
rt_atomic_add(&r->refcount, 1);
}
/**
 * rt_ref_put
 * Decrement the reference counter of the object.
 * If the counter drops to zero, call release().
 *
 * Returns 1 if the counter reached zero and release() was called, 0 otherwise.
 */
rt_inline int rt_ref_put(struct rt_ref *r, void (*release)(struct rt_ref *r))
{
if (rt_atomic_dec_and_test(&r->refcount))
{
release(r);
return 1;
}
return 0;
}
/**
 * rt_ref_get_unless_zero - increment the refcount of the object unless it is zero.
 * Returns non-zero if the increment succeeded, 0 otherwise.
 */
rt_inline int rt_ref_get_unless_zero(struct rt_ref *r)
{
return (int)rt_atomic_inc_not_zero(&r->refcount);
}
#endif /* __UTIL_REF_H__ */
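A short sketch of the intended embedding pattern, assuming a hypothetical heap-allocated object that is freed by its release callback (header name taken from its include guard, path assumed):

#include <rtthread.h>
#include "ref.h"   /* the header above; exact path assumed */

struct my_object
{
    struct rt_ref ref;
    /* payload ... */
};

/* Called by rt_ref_put() when the last reference is dropped. */
static void my_object_release(struct rt_ref *r)
{
    struct my_object *obj = rt_container_of(r, struct my_object, ref);

    rt_free(obj);
}

static struct my_object *my_object_create(void)
{
    struct my_object *obj = rt_malloc(sizeof(*obj));

    if (obj)
        rt_ref_init(&obj->ref);   /* refcount starts at 1, owned by the caller */

    return obj;
}

static void my_object_use(struct my_object *obj)
{
    rt_ref_get(&obj->ref);                      /* take an extra reference */
    /* ... use obj ... */
    rt_ref_put(&obj->ref, my_object_release);   /* last put frees the object */
}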


@@ -0,0 +1,9 @@
from building import *
cwd = GetCurrentDir()
src = Glob('*.c')
CPPPATH = [cwd]
group = []
group = DefineGroup('LIBADT', src, depend = [], CPPPATH = CPPPATH)
Return('group')


@@ -0,0 +1,16 @@
/*
* Copyright (c) 2006-2023, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2023-11-01 Shell Init ver.
*/
#ifndef __LIBADT_DICT_H__
#define __LIBADT_DICT_H__
#include "rt_uthash.h"
#endif


@@ -0,0 +1,57 @@
/*
* Copyright (c) 2006-2023, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2023-11-01 Shell Porting to RTT API
*/
#ifndef __LIBADT_RT_UTHASH_H__
#define __LIBADT_RT_UTHASH_H__
#include <rtthread.h>
#define uthash_malloc(sz) rt_malloc(sz)
#define uthash_free(ptr, sz) rt_free(ptr)
/**
 * For performance, the libc implementations are used by default.
 * If compatibility is a concern, define RT_UTHASH_CONFIG_COMPATIBILITY_FIRST
 * before including rt_uthash.h to use the RT-Thread implementations instead.
 */
#ifndef RT_UTHASH_CONFIG_COMPATIBILITY_FIRST
#define uthash_bzero(a, n) memset(a, '\0', n)
#define uthash_strlen(s) strlen(s)
#else
#define uthash_bzero(a, n) rt_memset(a, '\0', n)
#define uthash_strlen(s) rt_strlen(s)
#endif /* RT_UTHASH_CONFIG_COMPATIBILITY_FIRST */
/* on a fatal error, log it and return a failure code */
#define uthash_fatal(msg) \
do \
{ \
LOG_E(msg); \
return -RT_ENOMEM; \
} while (0)
#include "uthash.h"
#define DEFINE_RT_UTHASH_TYPE(entry_name, key_type, key_name) \
typedef struct entry_name \
{ \
key_type key_name; \
UT_hash_handle hh; \
} *entry_name##_t;
#define RT_UTHASH_ADD(head, key_member, keylen_in, value) \
HASH_ADD(hh, head, key_member, keylen_in, value)
#define RT_UTHASH_FIND(head, key_ptr, keylen_in, pval) \
HASH_FIND(hh, head, key_ptr, keylen_in, pval)
#define RT_UTHASH_DELETE(head, pobj) HASH_DELETE(hh, head, pobj)
#endif /* __LIBADT_RT_UTHASH_H__ */
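A minimal sketch of the wrapper macros, assuming a hypothetical table keyed by an integer ID. Two caveats follow from the header itself: uthash_fatal() expands to a return statement, so functions that add entries must return an error code, and LOG_E() needs the usual DBG_TAG / <rtdbg.h> setup before this header's macros are expanded.

#define DBG_TAG "my.dict"
#define DBG_LVL DBG_INFO
#include <rtdbg.h>

#include "rt_uthash.h"

/* defines: typedef struct my_entry { int id; UT_hash_handle hh; } *my_entry_t; */
DEFINE_RT_UTHASH_TYPE(my_entry, int, id)

static struct my_entry *my_table = RT_NULL;   /* an empty uthash table is a NULL head */

static rt_err_t my_table_add(int id)
{
    struct my_entry *entry = rt_calloc(1, sizeof(*entry));

    if (!entry)
        return -RT_ENOMEM;

    entry->id = id;
    RT_UTHASH_ADD(my_table, id, sizeof(entry->id), entry);
    return RT_EOK;
}

static struct my_entry *my_table_find(int id)
{
    struct my_entry *entry = RT_NULL;

    RT_UTHASH_FIND(my_table, &id, sizeof(id), entry);
    return entry;
}

static void my_table_del(struct my_entry *entry)
{
    RT_UTHASH_DELETE(my_table, entry);
    rt_free(entry);
}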

File diff suppressed because it is too large.