Update bbfdm tools

This commit is contained in:
Amin Ben Romdhane 2023-11-20 13:36:36 +00:00
parent ea08860a34
commit 7ecae3f6d1
27 changed files with 1115 additions and 1150 deletions

.gitignore vendored

@ -8,3 +8,4 @@ install_manifest.txt
bbfdmd/ubus/bbfdmd
docs/index.md
__pycache__
out


@ -38,13 +38,14 @@ run_tools_test:
allow_failure: false
script:
- "./gitlab-ci/tools-test.sh"
- "./gitlab-ci/generate_supported_dm.sh"
artifacts:
when: always
paths:
- tools/out/datamodel_default.xml
- tools/out/datamodel_hdm.xml
- tools/out/datamodel.xls
- out/datamodel_default.xml
- out/datamodel_hdm.xml
- out/datamodel.xls
run_libbbfdm_api_functional_test:
stage: functional_test


@ -53,7 +53,7 @@ More explanation on how this daemon works and all supported methods are presente
All APIs exposed by libbbfdm-api are presented in this header file [libbbfdm_api.h](./libbbfdm-api/include/libbbfdm_api.h).
- `tools` folder which contains some tools to generate Data Model in C, JSON, XML and Excel format.
All supported tools are presented in this file [BBFDM Tools](./docs/guide/tools.md)
All supported tools are presented in this file [BBFDM Tools](./tools/README.md)
- `docs` folder which contains all documentation files.
@ -62,7 +62,7 @@ All supported tools are presented in this file[BBFDM Tools](./docs/guide/tools.m
* [BBFDMD Design](./docs/arch/bbfdmd.md)
* [Datamodel extension using JSON plugin](./docs/guide/json_plugin_v1.md)
* [Datamodel Plugins and Microservice](./docs/guide/datamodel_as_microservice.md)
* [BBFDM Tools](./docs/guide/tools.md)
* [BBFDM Tools](./tools/README.md)
## Important Topics
* [Add support of a new Object/Parameter](./docs/guide/obj_param_extension.md)


@ -11,6 +11,12 @@ IF(${BBFDMD_MAX_MSG_LEN})
ADD_DEFINITIONS(-DBBFDM_MAX_MSG_LEN=${BBFDMD_MAX_MSG_LEN})
ENDIF()
OPTION(BBF_SCHEMA_FULL_TREE "build with schema full tree" OFF)
IF(BBF_SCHEMA_FULL_TREE)
add_compile_definitions(BBF_SCHEMA_FULL_TREE)
ENDIF(BBF_SCHEMA_FULL_TREE)
FILE(GLOB BBF_SOURCES *.c)
ADD_EXECUTABLE(bbfdmd ${BBF_SOURCES})
TARGET_LINK_LIBRARIES(bbfdmd ubus ubox blobmsg_json dl bbfdm-api)
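For reference, a minimal sketch of enabling the new option at configure time, assuming an out-of-tree build directory; the flags mirror those passed by `build_and_install_bbfdm()` in tools/bbf_common.py later in this commit:
```bash
# Configure bbfdm with the full schema tree and vendor extension enabled.
mkdir -p build && cd build
cmake .. -DBBF_SCHEMA_FULL_TREE=ON -DBBF_VENDOR_EXTENSION=ON \
      -DBBF_VENDOR_LIST=iopsys -DBBF_VENDOR_PREFIX=X_IOPSYS_EU_
make && make install
```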


@ -449,13 +449,17 @@ static int bbfdm_schema_handler(struct ubus_context *ctx, struct ubus_object *ob
blob_buf_init(&data.bb, 0);
if (dm_type == BBFDM_CWMP) {
#ifdef BBF_SCHEMA_FULL_TREE
data.bbf_ctx.isinfo = true;
bbf_dm_get_supported_dm(&data);
#else
if (dm_type == BBFDM_CWMP)
bbfdm_get_names(&data);
ubus_send_reply(ctx, req, data.bb.head);
} else {
else
get_schema_from_blob(&u->dm_schema, &data);
ubus_send_reply(ctx, req, data.bb.head);
}
#endif
ubus_send_reply(ctx, req, data.bb.head);
blob_buf_free(&data.bb);
free_path_list(&paths_list);


@ -17,7 +17,6 @@
extern struct list_head loaded_json_files;
extern struct list_head json_list;
extern struct list_head json_memhead;
extern const char *CONFIG_PLUGIN_PATH;
#define UNUSED __attribute__((unused))


@ -254,38 +254,3 @@ int register_events_to_ubus(struct ubus_context *ctx, struct list_head *ev_list)
return 0;
}
bool is_registered_event(char *name)
{
bool ret = false;
if (!name)
return false;
struct dmctx bbf_ctx = {
.in_param = ROOT_NODE,
.nextlevel = false,
.iscommand = false,
.isevent = true,
.isinfo = false,
.instance_mode = INSTANCE_MODE_NUMBER,
.dm_type = BBFDM_USP
};
bbf_init(&bbf_ctx);
if (0 == bbfdm_cmd_exec(&bbf_ctx, BBF_SCHEMA)) {
struct dm_parameter *param;
list_for_each_entry(param, &bbf_ctx.list_parameter, list) {
if (strcmp(param->name, name) == 0) {
ret = true;
break;
}
}
}
bbf_cleanup(&bbf_ctx);
return ret;
}


@ -17,6 +17,5 @@ struct event_map_list {
void free_ubus_event_handler(struct ubus_context *ctx, struct list_head *ev_list);
int register_events_to_ubus(struct ubus_context *ctx, struct list_head *ev_list);
bool is_registered_event(char *name);
#endif /* EVENT_H */


@ -1,238 +0,0 @@
# BBFDM Tools
BBFDM tools are written in Python 3 and have the dependencies below.
System utilities: python3-pip, libxml2-utils
```bash
$ sudo apt install -y python3-pip
$ sudo apt install -y libxml2-utils
```
Python utilities: jsonschema, xlwt
```bash
$ pip3 install jsonschema xlwt
```
| Tools | Description |
| ----------------------- |:------------------------------------------------------------:|
|convert_dm_json_to_c.py  | Convert JSON mapping to C code for dynamic plugin libraries.  |
|convert_dm_xml_to_json.py| Convert standard XML to JSON format.                          |
|generate_dm.py           | Generate list of supported/unsupported parameters based on JSON input|
|generate_dm_xml.py       | Generate list of supported/unsupported parameters in XML format |
|generate_dm_excel.py     | Generate list of supported/unsupported parameters in XLS format |
|validate_json_plugin.py  | Validate JSON plugin files for dynamic libraries or standard data models |
> Note: Currently, all the tools need to be executed from the tools directory.
## XML->JSON converter
It is a [python script](../../tools/convert_dm_xml_to_json.py) to convert a Data Model from Broadband Forum XML format to JSON format.
```bash
$ ./convert_dm_xml_to_json.py
Usage: ./convert_dm_xml_to_json.py <tr-xxx cwmp xml data model> <tr-xxx usp xml data model> [Object path]
Examples:
- ./convert_dm_xml_to_json.py tr-181-2-*-cwmp-full.xml tr-181-2-*-usp-full.xml Device.
==> Generate the json file of the sub tree Device. in tr181.json
- ./convert_dm_xml_to_json.py tr-104-2-0-2-cwmp-full.xml tr-104-2-0-2-usp-full.xml Device.Services.VoiceService.
==> Generate the json file of the sub tree Device.Services.VoiceService. in tr104.json
- ./convert_dm_xml_to_json.py tr-106-1-2-0-full.xml Device.
==> Generate the json file of the sub tree Device. in tr106.json
Example of xml data model file: https://www.broadband-forum.org/cwmp/tr-181-2-*-cwmp-full.xml
```
## XML generator
[Python script](../../tools/generate_dm_xml.py) to generate the list of supported and unsupported Data Model parameters in XML, in the ACS-supported formats **Broadband Forum schema** and **HDM**.
```bash
$ ./generate_dm_xml.py -h
usage: generate_dm_xml.py [-h] [-r git^https://dev.iopsys.eu/bbf/stunc.git^devel] [-v iopsys] [-p X_IOPSYS_EU_] [-d DEVICE_PROTOCOL_DSLFTR069v1] [-m iopsys] [-u 002207] [-c DG400PRIME] [-n DG400PRIME-A]
[-s 1.2.3.4] [-f BBF] [-o datamodel.xml]
Script to generate list of supported and non-supported parameter in xml format
optional arguments:
-h, --help show this help message and exit
-r git^https://dev.iopsys.eu/bbf/stunc.git^devel, --remote-dm git^https://dev.iopsys.eu/bbf/stunc.git^devel
Includes OBJ/PARAM defined under remote repositories defined as bbf plugin
-v iopsys, --vendor-list iopsys
Generate data model tree with vendor extension OBJ/PARAM.
-p X_IOPSYS_EU_, --vendor-prefix X_IOPSYS_EU_
Generate data model tree using provided vendor prefix for vendor defined objects.
-d DEVICE_PROTOCOL_DSLFTR069v1, --device-protocol DEVICE_PROTOCOL_DSLFTR069v1
Generate data model tree using this device protocol.
-m iopsys, --manufacturer iopsys
Generate data model tree using this manufacturer.
-u 002207, --manufacturer-oui 002207
Generate data model tree using this manufacturer oui.
-c DG400PRIME, --product-class DG400PRIME
Generate data model tree using this product class.
-n DG400PRIME-A, --model-name DG400PRIME-A
Generate data model tree using this model name.
-s 1.2.3.4, --software-version 1.2.3.4
Generate data model tree using this software version.
-f BBF, --format BBF Generate data model tree with HDM format.
-o datamodel.xml, --output datamodel.xml
Generate the output file with given name
Part of BBF-tools, refer Readme for more examples
```
More examples:
```bash
$ ./generate_dm_xml.py -v iopsys -v test
$ ./generate_dm_xml.py -v iopsys -p X_IOPSYS_EU_ -r git^https://dev.iopsys.eu/bbf/stunc.git^devel
$ ./generate_dm_xml.py -f HDM -v iopsys -p X_IOPSYS_EU_ -o iopsys.xml
```
> Note: For the remote data model, *git* is the only proto allowed in the *generate_dm_xml.py* script. Therefore, if you want to use vendor extensions from a local repository, you must use the *generate_dm.py* script.
## Excel generator
[Python script](../../tools/generate_dm_excel.py) to generate the list of supported and unsupported parameters in an Excel sheet.
```bash
$ ./generate_dm_excel.py -h
usage: generate_dm_excel.py [-h] -d tr181 [-r git^https://dev.iopsys.eu/bbf/stunc.git^devel] [-v iopsys] [-p X_IOPSYS_EU_] [-o supported_datamodel.xls]
Script to generate list of supported and non-supported parameter in xls format
optional arguments:
-h, --help show this help message and exit
-d tr181, --datamodel tr181
-r git^https://dev.iopsys.eu/bbf/stunc.git^devel, --remote-dm git^https://dev.iopsys.eu/bbf/stunc.git^devel
Includes OBJ/PARAM defined under remote repositories defined as bbf plugin
-v iopsys, --vendor-list iopsys
Generate data model tree with vendor extension OBJ/PARAM
-p X_IOPSYS_EU_, --vendor-prefix X_IOPSYS_EU_
Generate data model tree using provided vendor prefix for vendor defined objects
-o supported_datamodel.xls, --output supported_datamodel.xls
Generate the output file with given name
Part of BBF-tools, refer Readme for more examples
```
More examples:
```bash
$ ./generate_dm_excel.py -d tr181 -v iopsys -o datamodel.xls
$ ./generate_dm_excel.py -d tr181 -d tr104 -v iopsys -o datamodel.xls
$ ./generate_dm_excel.py -d tr181 -v iopsys -p X_IOPSYS_EU_ -r git^https://dev.iopsys.eu/bbf/xmppc.git^devel -o datamodel_iopsys.xls
```
## Validate JSON plugin
It is a [python script](../../tools/validate_json_plugin.py) to validate JSON plugin files for a dynamic library or a standard data model: [TR181](./libbbfdm/dmtree/json/tr181.json), [TR104](./libbbfdm/dmtree/json/tr104.json), etc.
```bash
$ ./tools/validate_json_plugin.py test/files/etc/bbfdm/json/UserInterface.json
$ ./tools/validate_json_plugin.py test/files/etc/bbfdm/json/X_IOPSYS_EU_TEST.json
$ ./tools/validate_json_plugin.py dmtree/json/tr181.json
```
## Data Model generator
This is a pipeline-friendly master script to generate the list of supported and unsupported datamodels in XML and XLS formats, based on the input provided in a JSON file.
An example JSON file is available [here](../../tools/tools_input.json).
```bash
$ Usage: generate_dm.py <input json file>
Examples:
- generate_dm.py tools_input.json
==> Generate all required files defined in tools_input.json file
```
The parameters/keys used in the tools_input.json file are mostly self-explanatory, but a few require a bit more detail.
| Key | Description |
|-----|-------------|
| vendor_list | This option should match the vendor directory names |
| dm_json_files | The list of JSON file paths, where each file contains the definition of DM objects/parameters |
| vendor_prefix | The prefix used by the vendor for vendor extensions in DM objects/parameters |
| output.acs | Currently the tool supports two variants of XML definitions of DM objects/parameters |
| | hdm: This variant of XML is compatible with the Nokia HDM ACS |
| | default: This contains the generic definition, which can describe DM objects/parameters in more detail |
| output.file_format | xls: An Excel file listing the supported and unsupported DM objects/parameters |
> Note: To add more description for vendor-extended DM objects/parameters, define the related DM objects/parameters in a JSON file (the JSON structure should follow the same format as [tr181.json](../../libbbfdm/dmtree/json/tr181.json)); the same JSON file then needs to be listed in dm_json_files.
The input JSON file should be defined as follows:
```bash
{
"manufacturer": "iopsys",
"protocol": "DEVICE_PROTOCOL_DSLFTR069v1",
"manufacturer_oui": "002207",
"product_class": "DG400PRIME",
"model_name": "DG400PRIME-A",
"software_version": "1.2.3.4",
"vendor_list": [
"iopsys",
"test"
],
"dm_json_files": [
"../libbbfdm/dmtree/json/tr181.json",
"../libbbfdm/dmtree/json/tr104.json"
],
"vendor_prefix": "X_IOPSYS_EU_",
"plugins": [
{
"repo": "https://dev.iopsys.eu/bbf/mydatamodel.git",
"proto": "git",
"version": "tag/hash/branch",
"dm_files": [
"src/datamodel.c",
"src/additional_datamodel.c"
]
},
{
"repo": "https://dev.iopsys.eu/bbf/mybbfplugin.git",
"proto": "git",
"version": "tag/hash/branch",
"dm_files": [
"dm.c"
]
},
{
"repo": "https://dev.iopsys.eu/bbf/mydatamodeljson.git",
"proto": "git",
"version": "tag/hash/branch",
"dm_files": [
"src/plugin/datamodel.json"
]
},
{
"repo": "/home/iopsys/sdk/mypackage/",
"proto": "local",
"dm_files": [
"src/datamodel.c",
"additional_datamodel.c"
]
},
{
"repo": "/src/feeds/mypackage/",
"proto": "local",
"dm_files": [
"datamodel.c",
"src/datamodel.json"
]
}
],
"output": {
"acs": [
"hdm",
"default"
],
"file_format": [
"xml",
"xls"
],
"output_dir": "./out",
"output_file_prefix": "datamodel"
}
}
```
> Note1: For a local repository, you must use an absolute path as the repo option.
> Note2: If proto is not defined in the JSON config file, then git is used by default as the proto option.
- For more examples of the tools input JSON file, see [tools_input.json](../../tools/tools_input.json).

docs/guide/tools.md Symbolic link

@ -0,0 +1 @@
../../tools/README.md


@ -0,0 +1,73 @@
#!/bin/bash
echo "Generate xml and xls artifacts"
source ./gitlab-ci/shared.sh
mkdir -p /etc/supervisor/conf.d/
cp ./gitlab-ci/core_service.conf /etc/supervisor/conf.d/
is_supervisor_running=0
pp="$(pidof python3)"
[ -n "${pp}" ] && {
if ps -p ${pp}|grep -wq supervisord; then
is_supervisor_running=1
fi
}
if [ "${is_supervisor_running}" -eq "1" ] ; then
# starting base services
supervisorctl reread
supervisorctl update
else
/usr/bin/supervisord -c /etc/supervisor/supervisord.conf
fi
sleep 5
supervisorctl status all
# install required packages
exec_cmd apt update
exec_cmd apt install -y python3-pip libxml2-utils
exec_cmd pip3 install jsonschema xlwt ubus pylint
if [ -n "${CI_SERVER_HOST}" ]; then
echo "machine ${CI_SERVER_HOST}" >>~/.netrc
echo "login gitlab-ci-token" >>~/.netrc
echo "password ${CI_JOB_TOKEN}" >>~/.netrc
fi
# Make sure that all plugins are removed
repo_dir="/etc/bbfdm/plugins"
[ ! -d "${repo_dir}" ] && mkdir -p "${repo_dir}"
rm -f ${repo_dir}/*
# Make sure that all micro-services are removed
rm -rf /etc/app*
if pidof bbfdmd >/dev/null; then
kill -9 $(pidof bbfdmd)
fi
if [ -z "${1}" ]; then
./tools/generate_dm.py tools/tools_input.json
else
if [ ! -f "${1}" ]; then
echo "Invalid input file ${1}"
else
./tools/generate_dm.py "${1}"
fi
fi
check_ret $?
echo "Check if the required tools are generated"
[ ! -f "out/datamodel.xls" ] && echo "Excel file doesn't exist" && exit 1
[ ! -f "out/datamodel_hdm.xml" ] && echo "XML file with HDM format doesn't exist" && exit 1
[ ! -f "out/datamodel_default.xml" ] && echo "XML file with BBF format doesn't exist" && exit 1
echo "Validate datamodel_default generated XML file"
xmllint --schema test/tools/cwmp-datamodel-1-8.xsd out/datamodel_default.xml --noout
check_ret $?
echo "Generation of xml and xls artifacts :: PASS"


@ -18,7 +18,7 @@ function check_ret()
function exec_cmd()
{
echo "executing $@"
$@ >/dev/null
$@ >/dev/null 2>&1
if [ $? -ne 0 ]; then
echo "Failed to execute $@"


@ -4,10 +4,17 @@ echo "Verification of BBF Tools"
pwd
source ./gitlab-ci/shared.sh
cp ./gitlab-ci/core_service.conf /etc/supervisor/conf.d/
# starting base services
supervisorctl reread
supervisorctl update
sleep 5
# install required packages
exec_cmd apt update
exec_cmd apt install -y python3-pip libxml2-utils
exec_cmd pip3 install jsonschema xlwt pylint
exec_cmd pip3 install jsonschema xlwt ubus pylint
echo "Validating PEP8 syntax on tools"
exec_cmd_verbose pylint -d R,C,W0603 tools/*.py
@ -57,29 +64,5 @@ json_path=$(./tools/convert_dm_xml_to_json.py test/tools/tr-135-1-4-1-cwmp-full.
./tools/validate_json_plugin.py $json_path
check_ret $?
echo "********* Validate XML File *********"
if [ -n "${CI_SERVER_HOST}" ]; then
echo "machine ${CI_SERVER_HOST}" >>~/.netrc
echo "login gitlab-ci-token" >>~/.netrc
echo "password ${CI_JOB_TOKEN}" >>~/.netrc
fi
cd tools
./generate_dm.py tools_input.json
check_ret $?
echo "Check if the required tools are generated"
[ ! -f "out/datamodel.xls" ] && echo "Excel file doesn't exist" && exit 1
[ ! -f "out/datamodel_hdm.xml" ] && echo "XML file with HDM format doesn't exist" && exit 1
[ ! -f "out/datamodel_default.xml" ] && echo "XML file with BBF format doesn't exist" && exit 1
cd ..
echo "Validate datamodel_default generated XML file"
xmllint --schema test/tools/cwmp-datamodel-1-8.xsd tools/out/datamodel_default.xml --noout
check_ret $?
date +%s > timestamp.log
echo "Tools Test :: PASS"


@ -5,7 +5,7 @@
"input": {
"type": "DotSo",
"name": "/lib/libbbfdm.so",
"plugin_dir": "/etc/bbfdm/plugins/"
"plugin_dir": "/etc/bbfdm/plugins"
},
"output": {
"type": "UBUS",


@ -11,6 +11,7 @@ IF(${BBF_MAX_OBJECT_INSTANCES})
ENDIF()
OPTION(BBF_VENDOR_EXTENSION "build with vendor extension enabled" ON)
OPTION(BBF_SCHEMA_FULL_TREE "build with schema full tree" OFF)
SET(BBF_PLUGIN_SOURCES plugin/dotso_plugin.c plugin/json_plugin.c)
@ -22,6 +23,10 @@ IF(BBF_VENDOR_EXTENSION)
STRING(REPLACE "," ";" VENDOR_LIST ${BBF_VENDOR_LIST})
ENDIF(BBF_VENDOR_EXTENSION)
IF(BBF_SCHEMA_FULL_TREE)
add_compile_definitions(BBF_SCHEMA_FULL_TREE)
ENDIF(BBF_SCHEMA_FULL_TREE)
FILE(GLOB BBF_API_SOURCES *.c)
ADD_LIBRARY(bbfdm-api SHARED ${BBF_API_SOURCES} ${BBF_PLUGIN_SOURCES} ${BBF_VENDOR_EXTENSION_SOURCES})


@ -320,6 +320,7 @@ static int bbfdatamodel_matches(unsigned int dm_type, const enum bbfdm_type_enum
static bool check_dependency(const char *conf_obj)
{
#ifndef BBF_SCHEMA_FULL_TREE
/* Available cases */
/* one file => "file:/etc/config/network" */
/* multiple files => "file:/etc/config/network,/lib/netifd/proto/dhcp.sh" */
@ -360,6 +361,7 @@ static bool check_dependency(const char *conf_obj)
}
}
}
#endif
return true;
}
@ -468,11 +470,15 @@ static void dm_browse_entry(struct dmctx *dmctx, DMNODE *parent_node, DMOBJ *ent
return;
}
#ifndef BBF_SCHEMA_FULL_TREE
if ((entryobj->browseinstobj && dmctx->isgetschema) || !dmctx->isgetschema) {
#endif
*err = dmctx->method_obj(dmctx, &node, entryobj->permission, entryobj->addobj, entryobj->delobj, entryobj->get_linker, data, instance);
if (dmctx->stop)
return;
#ifndef BBF_SCHEMA_FULL_TREE
}
#endif
if (entryobj->browseinstobj && !dmctx->isgetschema) {
entryobj->browseinstobj(dmctx, &node, data, instance);


@ -196,6 +196,11 @@ pid_t get_pid(const char *pname)
return -1;
}
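/* qsort() comparator: orders an array of C strings using DM_STRCMP */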
int compare_strings(const void *a, const void *b)
{
return DM_STRCMP(*(const char **)a, *(const char **)b);
}
char *get_uptime(void)
{
FILE *fp = fopen(UPTIME, "r");


@ -180,8 +180,20 @@ enum option_type_enum {
};
#define sysfs_foreach_file(path,dir,ent) \
if ((dir = opendir(path)) == NULL) return 0; \
while ((ent = readdir (dir)) != NULL) \
if ((dir = opendir(path)) == NULL) return 0; \
while ((ent = readdir(dir)) != NULL) \
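/* Iterates directory entries in sorted order: collects up to max_num_files
 * names, sorts them with compare_strings(), then loops over files[i]. */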
#define sysfs_foreach_file_sorted(path,max_num_files) \
struct dirent *ent = NULL; \
DIR *dir = NULL; \
if ((dir = opendir(path)) == NULL) return 0; \
int num_files = 0; \
char *files[max_num_files]; \
while ((ent = readdir(dir)) != NULL && num_files < max_num_files) \
files[num_files++] = dmstrdup(ent->d_name); \
closedir(dir); \
qsort(files, num_files, sizeof(char*), compare_strings); \
for (int i = 0; i < num_files; i++)
struct dmmap_sect {
struct list_head list;
@ -209,6 +221,7 @@ struct dhcp_options_type {
};
pid_t get_pid(const char *pname);
int compare_strings(const void *a, const void *b);
char *get_uptime(void);
int check_file(char *path);
char *cidr2netmask(int bits);


@ -334,8 +334,7 @@ int get_leaf_idx(DMLEAF **entryleaf)
int load_plugins(DMOBJ *dm_entryobj, DM_MAP_VENDOR *dm_VendorExtension[], DM_MAP_VENDOR_EXCLUDE *dm_VendorExtensionExclude, const char *plugin_path)
{
struct dirent *ent = NULL;
DIR *dir = NULL;
int max_num_files = 256;
#ifdef BBF_VENDOR_EXTENSION
// Load objects and parameters exposed via vendor extension plugin
@ -355,21 +354,20 @@ int load_plugins(DMOBJ *dm_entryobj, DM_MAP_VENDOR *dm_VendorExtension[], DM_MAP
free_dotso_plugins();
free_specific_dynamic_node(dm_entryobj, INDX_LIBRARY_MOUNT);
sysfs_foreach_file(plugin_path, dir, ent) {
sysfs_foreach_file_sorted(plugin_path, max_num_files) {
char buf[512] = {0};
snprintf(buf, sizeof(buf), "%s/%s", plugin_path, ent->d_name);
snprintf(buf, sizeof(buf), "%s/%s", plugin_path, files[i]);
if (strstr(ent->d_name, ".json")) {
if (DM_STRSTR(files[i], ".json")) {
load_json_plugins(dm_entryobj, buf);
} else if (strstr(ent->d_name, ".so")) {
} else if (DM_STRSTR(files[i], ".so")) {
load_dotso_plugins(dm_entryobj, buf);
}
dmfree(files[i]);
}
if (dir) {
closedir(dir);
}
return 0;
}


@ -46,7 +46,7 @@ int load_dotso_plugins(DMOBJ *entryobj, const char *plugin_path)
{
void *handle = dlopen(plugin_path, RTLD_NOW|RTLD_LOCAL);
if (!handle) {
fprintf(stderr, "Plugin failed [%s]\n", dlerror());
TRACE("Plugin failed [%s]\n", dlerror());
return 0;
}


@ -1865,8 +1865,10 @@ int load_json_plugins(DMOBJ *entryobj, const char *plugin_path)
int json_plugin_version = JSON_VERSION_0;
json_object *json = json_object_from_file(plugin_path);
if (!json)
if (!json) {
TRACE("Plugin failed [%s]\n", plugin_path);
return 0;
}
json_object_object_foreach(json, key, jobj) {
if (!key)

tools/README.md Normal file

@ -0,0 +1,252 @@
# Datamodel diagnostics and support tools
bbfdm offers several tools/utilities to facilitate:
- Generation of a JSON-based datamodel definition from the Broadband Forum XML-based definition
- Generation of C code templates based on the JSON definition
- Generation of an XML definition of the supported datamodel for the Nokia HDM ACS
- Generation of an XML definition of the supported datamodel for other ACSes
- Generation of the list of supported data models in XML format for both USP and CWMP variants
- Validation of JSON-based datamodel plugins
## Dependencies
The tools are mostly written in Python and shell script; some require Docker images.
- python3-pip
- libxml2-utils
- docker.io
- jsonschema
- xlwt
- ubus
System utilities: python3-pip, libxml2-utils, docker.io
```bash
$ sudo apt install -y python3-pip
$ sudo apt install -y libxml2-utils
```
Python utilities: jsonschema, xlwt, ubus
```bash
$ pip3 install jsonschema xlwt ubus
```
To install Docker, follow this [external link](https://docs.docker.com/engine/install).
## Tools
Below is the list of tools:
| Tools | Description |
| ------------------------- | ------------------------------------------------------------ |
| convert_dm_xml_to_json.py | Tool to convert Broadband Forum's XML-based datamodel definition to a JSON-based datamodel definition |
| convert_dm_json_to_c.py   | Tool to convert a JSON-based datamodel definition with ubus/uci mappings to C code |
| validate_json_plugin.py   | Validate JSON-based datamodel plugin files |
| generate_dm.sh            | Generate list of supported/unsupported parameters based on JSON input |
### convert_dm_xml_to_json.py
[Broadband Forum](https://www.broadband-forum.org/) provides TR181 and other datamodel definitions in two formats: XML (machine-friendly) and HTML (user-friendly):
- [CWMP Specific datamodels](https://cwmp-data-models.broadband-forum.org/#sec:current-data-models)
- [USP specific datamodels](https://usp-data-models.broadband-forum.org/#sec:current-data-models)
In bbfdm, we needed a unified file that can be used for machine translation while remaining readable to humans, so we provide this tool to convert a Data Model from Broadband Forum XML format to JSON format.
This tool can be used as shown below:
```bash
$ ./tools/convert_dm_xml_to_json.py
Usage: ./tools/convert_dm_xml_to_json.py <tr-xxx cwmp xml data model> <tr-xxx usp xml data model> [Object path]
Examples:
- ./tools/convert_dm_xml_to_json.py test/tools/tr-181-2-*-cwmp-full.xml test/tools/tr-181-2-*-usp-full.xml Device.
==> Generate the json file of the sub tree Device. in tr181.json
- ./tools/convert_dm_xml_to_json.py test/tools/tr-104-2-0-2-cwmp-full.xml test/tools/tr-104-2-0-2-usp-full.xml Device.Services.VoiceService.
==> Generate the json file of the sub tree Device.Services.VoiceService. in tr104.json
Example of xml data model file: https://www.broadband-forum.org/cwmp/tr-181-2-*-cwmp-full.xml
```
### convert_dm_json_to_c.py
To add a datamodel via bbfdm, follow the [datamodel guide](https://dev.iopsys.eu/bbf/bbfdm/-/blob/devel/docs/guide/datamodel_as_microservice.md), which allows adding the datamodel with JSON plugins or with DotSO plugins.
This tool can generate template C code from JSON datamodel definitions.
```bash
$ ./tools/convert_dm_json_to_c.py
Usage: ./tools/convert_dm_json_to_c.py <data model name> [Object path]
data model name: The data model(s) to be used, for ex: tr181 or tr181,tr104
Examples:
- ./tools/convert_dm_json_to_c.py tr181
==> Generate the C code of tr181 data model in datamodel/ folder
- ./tools/convert_dm_json_to_c.py tr104
==> Generate the C code of tr104 data model in datamodel/ folder
- ./tools/convert_dm_json_to_c.py tr181,tr104
==> Generate the C code of tr181 and tr104 data model in datamodel/ folder
- ./tools/convert_dm_json_to_c.py tr181 Device.DeviceInfo.
==> Generate the C code of Device.DeviceInfo object in datamodel/ folder
- ./tools/convert_dm_json_to_c.py tr104 Device.Services.VoiceService.{i}.Capabilities.
==> Generate the C code of Device.Services.VoiceService.{i}.Capabilities. object in datamodel/ folder
```
### validate_json_plugin.py
This tool validates a plugin against the JSON schema, which is very helpful when developing JSON-based plugins.
```bash
$ ./tools/validate_json_plugin.py test/files/etc/bbfdm/json/UserInterface.json
$ ./tools/validate_json_plugin.py test/files/etc/bbfdm/json/X_IOPSYS_EU_TEST.json
$ ./tools/validate_json_plugin.py dmtree/json/tr181.json
```
More examples are available in [this path](https://dev.iopsys.eu/bbf/bbfdm/-/tree/devel/test/files/etc/bbfdm/plugins).
### generate_dm.sh
This tool generates the list of supported datamodel objects/parameters in XML and XLS formats, based on the input.
Historically, bbfdm tools used text parsing to produce the list of supported datamodel parameters, which had several limitations:
- Strict binding of datamodel definitions
- Need to maintain a specific sequence in the definitions
This improved tool uses a Docker image to query the runtime datamodel and thus provides accurate output.
#### How this works
Based on the plugins listed in the tools_input.json file, it simulates a runtime environment in a Docker image and reads the datamodel from the `bbfdm` ubus object (exposed by bbfdmd), which collects the data from all supported plugins and micro-services; see the manual-query sketch after the usage below.
```bash
Usage: ./tools/generate_dm.sh [OPTIONS]...
-I <docker image>
-i json input file path relative to top directory
-h help
examples:
~/git/bbfdm$ ./tools/generate_dm.sh -i tools/tools_input.json
```
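Assuming bbfdmd is already running inside the container and the ubus CLI is available, the query the script performs can be reproduced manually; this sketch mirrors the `ubus.call('bbfdm', 'schema', ...)` invocations in tools/bbf_common.py:
```bash
# Dump the supported datamodel schema, per protocol, from the
# bbfdm ubus object exposed by bbfdmd.
ubus call bbfdm schema '{"path": "Device.", "optional": {"proto": "usp"}}'
ubus call bbfdm schema '{"path": "Device.", "optional": {"proto": "cwmp"}}'
```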
The parameters/keys used in the tools_input.json file are mostly self-explanatory, but a few require a bit more detail.
| Key | Description |
|-----|-------------|
| manufacturer | The manufacturer's name, e.g., "IOPSYS" |
| protocol | The device protocol, e.g., "DEVICE_PROTOCOL_DSLFTR069v1" |
| manufacturer_oui | The Manufacturer's Organizationally Unique Identifier (OUI) in hexadecimal format, e.g., "002207" |
| product_class" | The product class, e.g., "DG400PRIME" |
| model_name | The model name, e.g., "DG400PRIME-A" |
| software_version | The software version, e.g., "1.2.3.4" |
| vendor_list | This option should match the vendor directory names, e.g., ["iopsys"] |
| dm_json_files | The list of JSON file paths, where each file contains the definition of DM objects/parameters |
| vendor_prefix | The prefix used by the vendor for vendor extensions in DM objects/parameters, e.g., "X_IOPSYS_EU_" |
| plugins | A list of plugins with associated repositories and data model files |
| | repo: The URL of the plugin repository |
| | version (optional): The version (tag/hash/branch) of the git plugin |
| | dm_files: A list of data model files associated with the plugin |
| | extra_dependencies (optional): Extra dependencies for the plugin, if any |
| | micro-service (optional): Information about the micro-service, including its name, parent data model, object, and root object |
| output.acs | Currently the tool supports two variants of XML definitions of DM objects/parameters |
| | hdm: This variant of XML is compatible with the Nokia HDM ACS |
| | default: This contains the generic definition, which can describe DM objects/parameters in more detail |
| output.file_format | Output file formats, e.g., ["xls", "xml"] |
| output.output_dir | The output directory for generated files, e.g., "./out" |
| output.output_file_prefix | The prefix for output file names, e.g., "datamodel" |
> Note:
> To add more description for vendor-extended DM objects/parameters, define the related DM objects/parameters in a JSON file (the JSON structure should follow the same format as [tr181.json](../libbbfdm/dmtree/json/tr181.json)); the same JSON file then needs to be listed in the dm_json_files list. A minimal hypothetical entry is sketched below.
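A minimal hypothetical sketch of such an entry; the object `Device.X_IOPSYS_EU_Example.` and its `Enable` parameter are invented names, and the keys follow those consumed by the generator in tools/bbf_common.py (type, protocols, access, write, description):
```json
{
    "Device.X_IOPSYS_EU_Example.": {
        "type": "object",
        "protocols": ["cwmp", "usp"],
        "access": false,
        "description": "Hypothetical vendor-extended object",
        "Enable": {
            "type": "boolean",
            "protocols": ["cwmp", "usp"],
            "write": true,
            "description": "Enables the hypothetical example object"
        }
    }
}
```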
The input JSON file should be defined as follows:
```bash
{
"manufacturer": "iopsys",
"protocol": "DEVICE_PROTOCOL_DSLFTR069v1",
"manufacturer_oui": "002207",
"product_class": "DG400PRIME",
"model_name": "DG400PRIME-A",
"software_version": "1.2.3.4",
"vendor_list": [
"iopsys",
"test"
],
"dm_json_files": [
"../libbbfdm/dmtree/json/tr181.json",
"../libbbfdm/dmtree/json/tr104.json"
],
"vendor_prefix": "X_IOPSYS_EU_",
"plugins": [
{
"repo": "https://dev.iopsys.eu/bbf/mydatamodel.git",
"version": "tag/hash/branch",
"dm_files": [
"src/datamodel.c",
"src/additional_datamodel.c"
]
},
{
"repo": "https://dev.iopsys.eu/bbf/mybbfplugin.git",
"version": "tag/hash/branch",
"dm_files": [
"dm.c"
]
},
{
"repo": "https://dev.iopsys.eu/bbf/mydatamodeljson.git",
"version": "tag/hash/branch",
"dm_files": [
"src/plugin/datamodel.json"
],
"micro-service": {
"name": "bbfdm.wifi",
"parent_dm": "Device.WiFi.",
"object": "DataElements",
"root_obj": "bbfdm"
}
},
{
"repo": "/home/iopsys/sdk/mypackage/",
"dm_files": [
"src/datamodel.c",
"additional_datamodel.c"
]
},
{
"repo": "/src/feeds/mypackage/",
"dm_files": [
"datamodel.c",
"src/datamodel.json"
]
}
],
"output": {
"acs": [
"hdm",
"default"
],
"file_format": [
"xml",
"xls"
],
"output_dir": "./out",
"output_file_prefix": "datamodel"
}
}
```
---
**NOTE**
1. All defined plugins are treated as plugins, except those that have the micro-service option defined, which are treated as micro-services.
2. The `micro-service` option should be defined in the plugin only if you want to overwrite a specific Object introduced in the main tree.
3. All the tools need to be executed from the top directory.
---
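For reference, a sketch of the per-micro-service input file that the tooling generates and passes to `bbfdmd -m`; it mirrors `create_input_json_file()` in tools/bbf_common.py, and the library path is illustrative:
```json
{
    "daemon": {
        "input": {
            "type": "DotSo",
            "name": "/etc/app0/lib0.so"
        },
        "output": {
            "type": "UBUS",
            "name": "bbfdm.wifi",
            "parent_dm": "Device.WiFi.",
            "object": "DataElements",
            "root_obj": "bbfdm"
        }
    }
}
```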


@ -3,79 +3,74 @@
# Copyright (C) 2021 iopsys Software Solutions AB
# Author: Amin Ben Ramdhane <amin.benramdhane@pivasoftware.com>
import sys
import os
import subprocess
import shutil
import json
from collections import OrderedDict
import ubus
import time
import glob
CURRENT_PATH = os.getcwd()
ROOT = None
# Constants
BBF_ERROR_CODE = 0
BBF_TR181_ROOT_FILE = "device.c"
BBF_VENDOR_ROOT_FILE = "vendor.c"
BBF_VENDOR_PREFIX = "X_IOPSYS_EU_"
BBF_DMTREE_PATH = CURRENT_PATH + "/../libbbfdm/dmtree"
BBF_DMTREE_PATH_TR181 = BBF_DMTREE_PATH + "/tr181"
BBF_DMTREE_PATH_TR143 = BBF_DMTREE_PATH + "/tr143"
BBF_DMTREE_PATH_TR471 = BBF_DMTREE_PATH + "/tr471"
BBF_DMTREE_PATH_TR181_JSON = BBF_DMTREE_PATH + "/json/tr181.json"
BBF_DMTREE_PATH_TR104_JSON = BBF_DMTREE_PATH + "/json/tr104.json"
DATA_MODEL_FILE = ".data_model.txt"
ARRAY_JSON_FILES = {"tr181": BBF_DMTREE_PATH_TR181_JSON,
"tr104": BBF_DMTREE_PATH_TR104_JSON}
LIST_DM_DIR = [BBF_DMTREE_PATH_TR181, BBF_DMTREE_PATH_TR143, BBF_DMTREE_PATH_TR471]
LIST_IGNORED_LINE = ['/*', '//', '#']
LIST_OBJ = []
LIST_PARAM = []
LIST_SUPPORTED_DM = []
CURRENT_PATH = os.getcwd()
BBF_DMTREE_PATH = os.path.join(CURRENT_PATH, "libbbfdm", "dmtree")
BBF_DMTREE_PATH_TR181_JSON = os.path.join(BBF_DMTREE_PATH, "json", "tr181.json")
BBF_DMTREE_PATH_TR104_JSON = os.path.join(BBF_DMTREE_PATH, "json", "tr104.json")
ARRAY_JSON_FILES = {"tr181": BBF_DMTREE_PATH_TR181_JSON, "tr104": BBF_DMTREE_PATH_TR104_JSON}
Array_Types = {"string": "DMT_STRING",
"unsignedInt": "DMT_UNINT",
"unsignedLong": "DMT_UNLONG",
"int": "DMT_INT",
"long": "DMT_LONG",
"boolean": "DMT_BOOL",
"dateTime": "DMT_TIME",
"hexBinary": "DMT_HEXBIN",
"base64": "DMT_BASE64",
"command": "DMT_COMMAND",
"event": "DMT_EVENT"}
LIST_SUPPORTED_USP_DM = []
LIST_SUPPORTED_CWMP_DM = []
def get_root_node():
return ROOT
Array_Types = {
"string": "DMT_STRING",
"unsignedInt": "DMT_UNINT",
"unsignedLong": "DMT_UNLONG",
"int": "DMT_INT",
"long": "DMT_LONG",
"boolean": "DMT_BOOL",
"dateTime": "DMT_TIME",
"hexBinary": "DMT_HEXBIN",
"base64": "DMT_BASE64",
"command": "DMT_COMMAND",
"event": "DMT_EVENT"
}
def set_root_node(rootdm = "Device."):
global ROOT
ROOT = rootdm
def rename_file(old_file_name, new_file_name):
def rename_file(old_path, new_path):
try:
os.rename(old_file_name, new_file_name)
os.rename(old_path, new_path)
except OSError:
pass
def remove_file(file_name):
def move_file(source_path, destination_path):
shutil.move(source_path, destination_path)
def remove_file(file_path):
try:
os.remove(file_name)
os.remove(file_path)
except OSError:
pass
def create_folder(folder_name):
def create_folder(folder_path):
try:
os.makedirs(folder_name, exist_ok = True)
os.makedirs(folder_path, exist_ok=True)
except OSError:
pass
# rmtree exception handler
def rmtree_handler(_func, path, _exc_info):
print(f'Failed to remove {path}')
def remove_folder(folder_name):
if os.path.isdir(folder_name):
shutil.rmtree(folder_name, onerror = rmtree_handler)
def remove_folder(folder_path):
if os.path.isdir(folder_path):
shutil.rmtree(folder_path, onerror=rmtree_handler)
def cd_dir(path):
try:
@ -90,8 +85,8 @@ def obj_has_child(value):
if isinstance(val, dict):
for obj1, val1 in val.items():
if obj1 == "type" and val1 == "object":
return 1
return 0
return True
return False
def obj_has_param(value):
@ -100,11 +95,20 @@ def obj_has_param(value):
if isinstance(val, dict):
for obj1, val1 in val.items():
if obj1 == "type" and val1 != "object":
return 1
return 0
return True
return False
def get_option_value(value, option, default = None):
def get_vendor_list(val):
vendor_list = ""
if isinstance(val, list):
for vendor in val:
vendor_list = vendor if not vendor_list else (
vendor_list + "," + vendor)
return vendor_list
def get_option_value(value, option, default=None):
if isinstance(value, dict):
for obj, val in value.items():
if obj == option:
@ -113,516 +117,360 @@ def get_option_value(value, option, default = None):
def get_param_type(value):
paramtype = get_option_value(value, "type")
return Array_Types.get(paramtype, None)
param_type = get_option_value(value, "type")
return Array_Types.get(param_type, None)
def get_protocol_from_json(value):
val = get_option_value(value, "protocols", ["cwmp", "usp"])
if "cwmp" in val and "usp" in val:
return "BBFDM_BOTH"
elif "cwmp" in val:
return "BBFDM_CWMP"
else:
return "BBFDM_USP"
def get_description_from_json(value):
val = get_option_value(value, "description", "")
return val
description = get_option_value(value, "description", "")
return description
def get_range_from_json(value):
val = get_option_value(value, "range", [])
return val
range_value = get_option_value(value, "range", [])
return range_value
def get_list_from_json(value):
val = get_option_value(value, "list", {})
return val
list_value = get_option_value(value, "list", {})
return list_value
def get_enum_from_json(value):
val = get_option_value(value, "enumerations", [])
return val
enumerations = get_option_value(value, "enumerations", [])
return enumerations
def is_proto_exist(value, proto):
protocols = get_option_value(value, "protocols", [])
return proto in protocols
def clear_list(input_list):
input_list.clear()
def generate_shared_library(output_library, source_files, vendor_prefix, extra_dependencies):
# Return if source_files (list) is empty
if len(source_files) == 0:
return
# Set vendor prefix
if vendor_prefix is not None:
VENDOR_PREFIX = vendor_prefix
else:
VENDOR_PREFIX = "X_IOPSYS_EU_"
# Ensure that the source files exist
for source_file in source_files:
if not os.path.exists(source_file):
print(f" Error: Source file {source_file} does not exist.")
return False
cmd = ['gcc', '-shared', '-o', output_library, '-fPIC', '-DBBF_VENDOR_PREFIX=\\"{}\\"'.format(VENDOR_PREFIX)] + source_files + extra_dependencies
# Compile the shared library
try:
cmdstr = ' '.join(str(e) for e in cmd)
subprocess.run(cmdstr, shell=True, check=True)
print(f" Shared library {output_library} successfully created.")
return True
except subprocess.CalledProcessError as e:
print(f" Error during compilation: {e}")
return False
def build_and_install_bbfdm(vendor_prefix, vendor_list):
print("Compiling and installing bbfdmd in progress ...")
create_folder(os.path.join(CURRENT_PATH, "build"))
cd_dir(os.path.join(CURRENT_PATH, "build"))
# Set vendor prefix
if vendor_prefix is not None:
VENDOR_PREFIX = vendor_prefix
else:
VENDOR_PREFIX = "X_IOPSYS_EU_"
# Set vendor list
if vendor_list is None:
VENDOR_LIST = "iopsys"
else:
VENDOR_LIST = get_vendor_list(vendor_list)
# Build and install bbfdm
cmake_command = [
"cmake",
"../",
"-DWITH_OPENSSL=ON",
"-DBBF_VENDOR_EXTENSION=ON",
"-DBBF_SCHEMA_FULL_TREE=ON",
f"-DBBF_VENDOR_LIST={VENDOR_LIST}",
f"-DBBF_VENDOR_PREFIX={VENDOR_PREFIX}",
"-DBBF_MAX_OBJECT_INSTANCES=255",
"-DBBFDMD_MAX_MSG_LEN=1048576",
"-DCMAKE_INSTALL_PREFIX=/"
]
make_command = ["make"]
make_install_command = ["make", "install"]
try:
subprocess.check_call(cmake_command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
subprocess.check_call(make_command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
subprocess.check_call(make_install_command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
except subprocess.CalledProcessError as e:
print(f"Error running commands: {e}")
sys.exit(1)
cd_dir(CURRENT_PATH)
remove_folder(os.path.join(CURRENT_PATH, "build"))
print('Compiling and installing bbfdmd done')
def run_command(command):
try:
# Use subprocess.Popen to start the daemon process
subprocess.Popen(command, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
# The daemon process will continue running in the background
time.sleep(1)
except subprocess.CalledProcessError as e:
# Handle subprocess errors here
print(f"Error running the daemon process: {e}")
sys.exit(1)
def create_input_json_file(file_path, input_type, input_name, micro_service_config):
data = {
"daemon": {
"input": {
"type": input_type,
"name": input_name
},
"output": {
"type": "UBUS",
"name": micro_service_config.get("name"),
"parent_dm": micro_service_config.get("parent_dm"),
"object": micro_service_config.get("object"),
"root_obj": micro_service_config.get("root_obj")
}
}
}
with open(file_path, 'w', encoding='utf-8') as json_file:
json.dump(data, json_file, indent=4)
def transform_schema_to_dm(schema_out, dm_list):
if not schema_out or not isinstance(schema_out, list) or len(schema_out) == 0:
return
result_list = schema_out[0].get("results", [])
for result in result_list:
path = result.get("path", "")
data = result.get("data", "0")
permission = "readOnly" if data == "0" else "readWrite"
p_type = result.get("type", "xsd:string")[4:]
entry = {
"param": path,
"permission": permission,
"type": p_type,
}
dm_list.append(entry)
def remove_duplicate_elements(input_list):
unique_values = set()
result_list = []
for item in input_list:
item_value = item["param"]
if item_value not in unique_values:
unique_values.add(item_value)
result_list.append(item)
return result_list
def clean_supported_dm_list():
LIST_SUPPORTED_DM.clear()
def fill_list_supported_dm():
fp = open(DATA_MODEL_FILE, 'r', encoding='utf-8')
Lines = fp.readlines()
# Wait for 5 seconds to be sure that all micro-services started successfully
time.sleep(5)
for line in Lines:
LIST_SUPPORTED_DM.append(line)
# pylint: disable=E1101
ubus.connect()
usp_schema_out = ubus.call('bbfdm', 'schema', {"path": "Device.", "optional": {"proto": "usp"}})
transform_schema_to_dm(usp_schema_out, LIST_SUPPORTED_USP_DM)
LIST_SUPPORTED_USP_DM.sort(key=lambda x: x['param'], reverse=False)
LIST_SUPPORTED_USP_DM[:] = remove_duplicate_elements(LIST_SUPPORTED_USP_DM)
cwmp_schema_out = ubus.call('bbfdm', 'schema', {"path": "Device.", "optional": {"proto": "cwmp"}})
transform_schema_to_dm(cwmp_schema_out, LIST_SUPPORTED_CWMP_DM)
LIST_SUPPORTED_CWMP_DM.sort(key=lambda x: x['param'], reverse=False)
LIST_SUPPORTED_CWMP_DM[:] = remove_duplicate_elements(LIST_SUPPORTED_CWMP_DM)
ubus.disconnect()
# pylint: enable=E1101
def fill_data_model_file():
fp = open(DATA_MODEL_FILE, 'a', encoding='utf-8')
for value in LIST_SUPPORTED_DM:
if (ROOT):
js_val = json.loads(value)
param = get_option_value(js_val, "param")
if param is not None and (param.startswith(ROOT)):
print(f"{value}", file=fp)
else:
print(f"{value}", file=fp)
fp.close()
def get_micro_service_config(micro_service):
parent_dm = get_option_value(micro_service, "parent_dm")
root_obj = get_option_value(micro_service, "root_obj")
obj = get_option_value(micro_service, "object")
name = get_option_value(micro_service, "name")
if not isinstance(micro_service, dict) or None in (parent_dm, root_obj, obj, name):
return None
return {
"parent_dm": parent_dm,
"root_obj": root_obj,
"object": obj,
"name": name
}
def reorganize_parent_child():
organized_dm = []
global LIST_SUPPORTED_DM
for value in LIST_SUPPORTED_DM:
obj = json.loads(value)
o_type = get_option_value(obj, "type", None)
if o_type != "DMT_OBJ":
continue
o_name = get_option_value(obj, "param", None)
if o_name is None:
continue
organized_dm.append(value)
for item in LIST_SUPPORTED_DM:
param = json.loads(item)
p_type = get_option_value(param, "type", None)
if p_type is None or p_type == "DMT_OBJ":
continue
p_name = get_option_value(param, "param", None)
if p_name is None:
continue
if p_name.find(o_name) != -1:
ob_dot = o_name.count('.')
pm_dot = p_name.count('.')
if ob_dot == pm_dot:
organized_dm.append(item)
LIST_SUPPORTED_DM.clear()
LIST_SUPPORTED_DM = organized_dm
def clone_git_repository(repo, version=None):
try:
cmd = ["git", "clone", repo, ".repo"]
if version is not None:
cmd.extend(["-b", version])
subprocess.run(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True)
return True
except (OSError, subprocess.SubprocessError):
print(f' Failed to clone {repo} !!!!!')
return False
def generate_datamodel_tree(filename):
if filename.endswith('.c') is False:
def get_repo_version_info(repo, version=None):
if version is None:
return repo
return f'{repo}^{version}'
def process_json_file(filename, idx, micro_service, micro_service_config):
if micro_service is None:
move_file(filename, "/etc/bbfdm/plugins")
else:
micro_srv_path = f"/etc/app{idx}"
micro_srv_lib_path = f"{micro_srv_path}/file{idx}.json"
micro_srv_input_path = f"{micro_srv_path}/input.json"
create_folder(micro_srv_path)
move_file(filename, micro_srv_lib_path)
create_input_json_file(micro_srv_input_path, "JSON", micro_srv_lib_path, micro_service_config)
run_command(f"/usr/sbin/bbfdmd -m {micro_srv_input_path}")
def process_c_files(LIST_FILES, vendor_prefix, extra_dependencies, idx, micro_service, micro_service_config):
if micro_service is None:
generate_shared_library(f"/etc/bbfdm/plugins/lib{idx}.so", LIST_FILES, vendor_prefix, extra_dependencies)
else:
micro_srv_path = f"/etc/app{idx}"
micro_srv_lib_path = f"{micro_srv_path}/lib{idx}.so"
micro_srv_input_path = f"{micro_srv_path}/input.json"
create_folder(micro_srv_path)
generate_shared_library(micro_srv_lib_path, LIST_FILES, vendor_prefix, extra_dependencies)
create_input_json_file(micro_srv_input_path, "DotSo", micro_srv_lib_path, micro_service_config)
run_command(f"/usr/sbin/bbfdmd -m {micro_srv_input_path}")
def download_and_build_plugins(plugins, vendor_prefix):
global BBF_ERROR_CODE
if plugins is None or not isinstance(plugins, list) or not plugins:
print("No plugins provided.")
return
LIST_DEL_PARAM = []
obj_found = 0
param_found = 0
obj_found_in_list = 0
table_name = ""
parent_obj = ""
print("Generating data models from defined plugins...")
fp = open(filename, 'r', encoding='utf-8')
for line in fp:
line = line.lstrip()
for plugin_index, plugin in enumerate(plugins):
micro_service_config = None
if line.startswith(tuple(LIST_IGNORED_LINE)) is True:
repo = get_option_value(plugin, "repo")
dm_files = get_option_value(plugin, "dm_files")
extra_dependencies = get_option_value(plugin, "extra_dependencies", [])
if repo is None or dm_files is None or not isinstance(dm_files, list):
print("Necessary input missing")
BBF_ERROR_CODE += 1
continue
if "DMOBJ" in line:
table_name = line[:line.index('[]')].rstrip(
'\n').replace("DMOBJ ", "")
obj_found = 1
micro_service = get_option_value(plugin, "micro-service")
if micro_service is not None:
micro_service_config = get_micro_service_config(micro_service)
if micro_service_config is None:
print("Micro service config not defined")
BBF_ERROR_CODE += 1
continue
print(f' - Processing plugin: {plugin}')
version = get_option_value(plugin, "version")
remove_folder(".repo")
if not clone_git_repository(repo, version):
print(f"Failed to clone {repo}")
BBF_ERROR_CODE += 1
continue
if "DMLEAF" in line:
table_name = line[:line.index('[]')].rstrip(
'\n').replace("DMLEAF ", "")
param_found = 1
continue
print(f' Processing {get_repo_version_info(repo, version)}')
if obj_found == 0 and param_found == 0:
continue
if "{0}" in line.replace(" ", ""):
obj_found = 0
param_found = 0
obj_found_in_list = 0
table_name = ""
parent_obj = ""
for value in LIST_DEL_PARAM:
LIST_PARAM.remove(value)
LIST_DEL_PARAM.clear()
continue
# Object Table
if obj_found == 1:
if obj_found_in_list == 0:
obj_list = LIST_OBJ
for value in obj_list:
val = value.split(":")
if val[1] == table_name:
parent_obj = val[0]
obj_found_in_list = 1
LIST_OBJ.remove(value)
obj = line.rstrip('\n').split(", ")
obj_name = parent_obj + obj[0].replace("{", "").replace("\"", "").replace(
"BBF_VENDOR_PREFIX", BBF_VENDOR_PREFIX).replace(" ", "")
obj_permission = obj[1].replace("&", "").replace(" ", "")
obj_mulinst = obj[5].replace("&", "").replace(" ", "")
obj_protocol = obj[11].replace("}", "").replace(" ", "").replace(",", "")
if obj_mulinst == "NULL":
full_obj_name = obj_name + "."
LIST_FILES = []
os.chdir(".repo/")
for dm_file in dm_files:
filename = dm_file
if filename.endswith('*.c'):
LIST_FILES.extend(glob.glob(filename))
else:
full_obj_name = obj_name + ".{i}."
if os.path.isfile(filename):
if filename.endswith('.c'):
LIST_FILES.append(filename)
elif filename.endswith('.json'):
process_json_file(filename, plugin_index, micro_service, micro_service_config)
else:
print(f"Unknown file format {filename}")
BBF_ERROR_CODE += 1
else:
print(f"File not accessible {filename}")
BBF_ERROR_CODE += 1
LIST_SUPPORTED_DM.append(
"{\"param\":\"" + full_obj_name + "\",\"permission\":\"" + obj_permission + "\",\"type\":\"DMT_OBJ\",\"protocol\":\"" + obj_protocol + "\"}")
if len(LIST_FILES) > 0:
process_c_files(LIST_FILES, vendor_prefix, extra_dependencies, plugin_index, micro_service, micro_service_config)
if obj[8] != "NULL":
LIST_OBJ.append(full_obj_name + ":" + obj[8])
clear_list(LIST_FILES)
os.chdir("..")
if obj[9] != "NULL":
LIST_PARAM.append(full_obj_name + ":" + obj[9])
remove_folder(".repo")
# Parameter Table
if param_found == 1:
param_list = LIST_PARAM
for value in param_list:
val = value.split(":")
if val[1] == table_name:
parent_obj = val[0]
param = line.rstrip('\n').split(",")
param_name = parent_obj + param[0].replace("{", "").replace(
"\"", "").replace("BBF_VENDOR_PREFIX", BBF_VENDOR_PREFIX).replace(" ", "")
param_permission = param[1].replace("&", "").replace(" ", "")
param_type = param[2].replace(" ", "")
param_protocol = param[5].replace("}", "").replace(" ", "")
LIST_SUPPORTED_DM.append(
"{\"param\":\"" + param_name + "\",\"permission\":\"" + param_permission + "\",\"type\":\"" + param_type + "\",\"protocol\":\"" + param_protocol + "\"}")
if value not in LIST_DEL_PARAM:
LIST_DEL_PARAM.append(value)
fp.close()
def generate_dynamic_datamodel_tree(filename):
if filename.endswith('.c') is False:
return
obj_found = 0
fp = open(filename, 'r', encoding='utf-8')
for line in fp:
line = line.lstrip()
if line.startswith(tuple(LIST_IGNORED_LINE)) is True:
continue
if "DM_MAP_OBJ" in line:
obj_found = 1
continue
if obj_found == 0:
continue
if "{0}" in line.replace(" ", ""):
obj_found = 0
continue
# Object Table
if obj_found == 1:
obj = line.rstrip('\n').split(", ")
obj_name = obj[0][1:].replace("\"", "").replace(" ", "").replace("BBF_VENDOR_PREFIX", BBF_VENDOR_PREFIX)
if obj[1] != "NULL":
LIST_OBJ.append(obj_name + ":" + obj[1])
if obj[2] != "NULL":
LIST_PARAM.append(obj_name + ":" + obj[2].replace("},", "").replace(" ", ""))
fp.close()
def parse_dynamic_json_datamodel_tree(obj, value):
obj_permission = "DMWRITE" if get_option_value(
value, "access") is True else "DMREAD"
obj_protocols = get_protocol_from_json(value)
obj_description = get_description_from_json(value)
obj_name = obj.replace("{BBF_VENDOR_PREFIX}", BBF_VENDOR_PREFIX)
LIST_SUPPORTED_DM.append("{\"param\":\"" + obj_name + "\",\"permission\":\"" + obj_permission + "\",\"type\":\"DMT_OBJ\",\"protocol\":\"" + obj_protocols + "\",\"description\":\"" + obj_description + "\"}")
hasobj = obj_has_child(value)
hasparam = obj_has_param(value)
if hasparam and isinstance(value, dict):
for k, v in value.items():
if k != "mapping" and isinstance(v, dict):
for k1, v1 in v.items():
if k1 == "type" and v1 != "object":
param_name = obj_name + k.replace("{BBF_VENDOR_PREFIX}", BBF_VENDOR_PREFIX)
param_type = get_param_type(v)
param_permission = "DMWRITE" if get_option_value(
v, "write") is True else "DMREAD"
param_protocols = get_protocol_from_json(v)
param_list = get_list_from_json(v)
param_enums = get_enum_from_json(v)
param_desc = get_description_from_json(v)
param_range = get_range_from_json(v)
LIST_SUPPORTED_DM.append(
"{\"param\":\"" + param_name + "\",\"permission\":\"" + param_permission + "\",\"type\":\"" + param_type + "\",\"protocol\":\"" + param_protocols + "\",\"description\":\"" + param_desc + "\",\"list\":" + json.dumps(param_list) + ",\"range\":" + json.dumps(param_range) + ",\"enum\":" + json.dumps(param_enums) + "}")
break
if hasobj and isinstance(value, dict):
for k, v in value.items():
if isinstance(v, dict):
for k1, v1 in v.items():
if k1 == "type" and v1 == "object":
parse_dynamic_json_datamodel_tree(k, v)
def generate_dynamic_json_datamodel_tree(filename):
if filename.endswith('.json') is False:
return
json_file = open(filename, "r", encoding='utf-8')
data = json.loads(json_file.read(), object_pairs_hook=OrderedDict)
for obj, value in data.items():
if obj is None or obj.startswith('Device.') is False:
continue
parse_dynamic_json_datamodel_tree(obj, value)
print('Generating plugins completed.')
def generate_supported_dm(vendor_prefix=None, vendor_list=None, plugins=None):
'''
1/ Download Remote Data Model if needed
2/ Parse all Standard Data Model
3/ Parse all Vendor Data Model if needed
4/ Generate the list of Supported Data Model 'LIST_SUPPORTED_DM'
5/ Copy the supported data model in file 'DATA_MODEL_FILE'
Generates supported data models and performs necessary actions.
Args:
vendor_prefix (str, optional): Vendor prefix for shared libraries.
vendor_list (list, optional): List of vendor data models.
plugins (list, optional): List of plugin configurations.
'''
############## SET BBF VENDOR PREFIX ##############
if vendor_prefix is not None:
global BBF_VENDOR_PREFIX
BBF_VENDOR_PREFIX = vendor_prefix
# Build && Install bbfdm
build_and_install_bbfdm(vendor_prefix, vendor_list)
############## GEN Local BBF Data Models TREE ##############
print("Generating the local data models...")
# Download && Build Plugins Data Models
download_and_build_plugins(plugins, vendor_prefix)
cd_dir(BBF_DMTREE_PATH_TR181)
generate_datamodel_tree(BBF_TR181_ROOT_FILE)
# Run bbfdm daemon
run_command("/usr/sbin/bbfdmd")
for DIR in LIST_DM_DIR:
cd_dir(DIR)
for _root, _dirs, files in os.walk("."):
files.sort()
for filename in files:
if filename.endswith('.c') is False or filename == BBF_TR181_ROOT_FILE:
continue
# Fill the list supported data model
fill_list_supported_dm()
generate_datamodel_tree(filename)
############## GEN Vendors BBF Data Models TREE ##############
if vendor_list is not None and isinstance(vendor_list, list) and vendor_list:
cd_dir(BBF_DMTREE_PATH)
for vendor in vendor_list:
vendor_dir = f'vendor/{vendor}/tr181'
if os.path.isdir(vendor_dir):
cd_dir(vendor_dir)
generate_dynamic_datamodel_tree(BBF_VENDOR_ROOT_FILE)
if os.path.isfile(BBF_TR181_ROOT_FILE):
generate_datamodel_tree(BBF_TR181_ROOT_FILE)
for _root, _dirs, files in os.walk("."):
files.sort()
for filename in files:
if filename.endswith('.c') is False or filename == BBF_VENDOR_ROOT_FILE or filename == BBF_TR181_ROOT_FILE:
continue
generate_datamodel_tree(filename)
cd_dir(BBF_DMTREE_PATH)
############## Download && Generate Plugins Data Models ##############
global BBF_ERROR_CODE
if plugins is not None and isinstance(plugins, list) and plugins:
print("Generating datamodels from defined plugins...")
cd_dir(CURRENT_PATH)
if isinstance(plugins, list):
for plugin in plugins:
proto = get_option_value(plugin, "proto")
repo = get_option_value(plugin, "repo")
if repo is None:
BBF_ERROR_CODE += 1
continue
if proto is not None and proto == "local":
print(f' - Processing plugin: {plugin} at {repo}')
if os.path.isdir(f"{repo}"):
print(f' Processing {repo}')
dm_files = get_option_value(plugin, "dm_files")
if dm_files is not None and isinstance(dm_files, list):
for dm_file in dm_files:
if os.path.isfile(f"{repo}/{dm_file}"):
generate_dynamic_datamodel_tree(f"{repo}/{dm_file}")
generate_datamodel_tree(f"{repo}/{dm_file}")
generate_dynamic_json_datamodel_tree(f"{repo}/{dm_file}")
else:
BBF_ERROR_CODE += 1
else:
files = os.popen(f'find {repo}/ -name datamodel.c').read()
for file in files.split('\n'):
if os.path.isfile(file):
generate_dynamic_datamodel_tree(file)
generate_datamodel_tree(file)
files = os.popen(f'find {repo}/ -name "*.json"').read()
for file in files.split('\n'):
if os.path.isfile(file):
generate_dynamic_json_datamodel_tree(file)
else:
print(f' {repo} is not a directory !!!!!')
BBF_ERROR_CODE += 1
else:
print(f' - Processing plugin: {plugin}')
version = get_option_value(plugin, "version")
remove_folder(".repo")
try:
subprocess.run(["git", "clone", repo, ".repo"],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check = True)
except (OSError, subprocess.SubprocessError) as _e:
print(f' Failed to clone {repo} !!!!!')
BBF_ERROR_CODE += 1
if version is not None:
try:
subprocess.run(["git", "-C", ".repo", "checkout", version],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True)
except (OSError, subprocess.SubprocessError) as _e:
print(f' Failed to checkout git version {version} !!!!!')
BBF_ERROR_CODE += 1
if os.path.isdir(".repo"):
if version is None:
print(f' Processing {repo}')
else:
print(f' Processing {repo}^{version}')
dm_files = get_option_value(plugin, "dm_files")
if dm_files is not None and isinstance(dm_files, list):
for dm_file in dm_files:
if os.path.isfile(".repo/" + dm_file):
generate_dynamic_datamodel_tree(".repo/" + dm_file)
generate_datamodel_tree(".repo/" + dm_file)
generate_dynamic_json_datamodel_tree(".repo/" + dm_file)
else:
BBF_ERROR_CODE += 1
else:
files = os.popen('find .repo/ -name datamodel.c').read()
for file in files.split('\n'):
if os.path.isfile(file):
generate_dynamic_datamodel_tree(file)
generate_datamodel_tree(file)
files = os.popen('find .repo/ -name "*.json"').read()
for file in files.split('\n'):
if os.path.isfile(file):
generate_dynamic_json_datamodel_tree(file)
remove_folder(".repo")
else:
BBF_ERROR_CODE += 1
print('Generating of plugins done')
############## Remove Duplicated Element from List ##############
global LIST_SUPPORTED_DM
LIST_SUPPORTED_DM = list(set(LIST_SUPPORTED_DM))
############## Sort all elements in List ##############
LIST_SUPPORTED_DM.sort(reverse=False)
### Reorganize objects and params based on parent-child ###
reorganize_parent_child()
############## Back to the current directory ##############
cd_dir(CURRENT_PATH)
############### COPY SUPPORTED DATA MODEL TO FILE ###############
remove_file(DATA_MODEL_FILE)
fill_data_model_file()
def get_param_info_from_json(data, dm_json_files=None, info="description"):
arr = data.split(".")
list_data = []
if len(arr) == 0:
return None
for i in range(0, len(arr)):
string = ""
if i == 0:
string=arr[i] + "."
elif i == (len(arr) - 1):
string=arr[i]
else:
for j in range(0, i + 1):
string=string + arr[j]
string=string + "."
if len(string) != 0:
list_data.append(string)
if len(list_data) == 0:
return None
found = False
res = None
if dm_json_files is not None and isinstance(dm_json_files, list) and dm_json_files:
for fl in dm_json_files:
if os.path.exists(fl):
with open(fl, 'r', encoding='utf-8') as f:
try:
ob = json.load(f)
except json.decoder.JSONDecodeError:
continue
index = -1
for key in ob.keys():
if key in list_data:
index = list_data.index(key)
break
if index == -1:
continue
for i in range(index, len(list_data)):
if i != (len(list_data) - 1) and list_data[i + 1] == list_data[i] + "{i}.":
continue
try:
if str(list_data[i]).find(BBF_VENDOR_PREFIX) != -1:
param = str(list_data[i]).replace(BBF_VENDOR_PREFIX, "{BBF_VENDOR_PREFIX}")
else:
param = str(list_data[i])
ob = ob[param]
found = True
except KeyError:
found = False
break
if found is True:
try:
res = ob[info]
break
except KeyError:
res = None
return res
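# Worked example (hypothetical path, not part of the module):
# "Device.WiFi.Radio.{i}.Enable" is decomposed above into the prefix list
#   ["Device.", "Device.WiFi.", "Device.WiFi.Radio.",
#    "Device.WiFi.Radio.{i}.", "Enable"]
# and the JSON tree is then walked key by key, skipping an object key
# whenever the next entry is the same path plus "{i}.".
desc = get_param_info_from_json("Device.WiFi.Radio.{i}.Enable",
                                ["libbbfdm/dmtree/json/tr181.json"])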
# kill all related bbfdm daemon
run_command("kill -9 $(pidof bbfdmd)")

View file

@ -19,15 +19,6 @@ def print_dm_usage():
exit(1)
def get_vendor_list(val):
vendor_list = ""
if isinstance(val, list):
for vendor in val:
vendor_list = vendor if not vendor_list else (
vendor_list + "," + vendor)
return vendor_list
### main ###
if len(sys.argv) < 2:
print_dm_usage()
@ -91,16 +82,13 @@ for option, value in json_data.items():
OUTPUT = value
continue
elif option == "root_node":
bbf.set_root_node(value)
continue
else:
print_dm_usage()
exit(1)
bbf.generate_supported_dm(VENDOR_PREFIX, VENDOR_LIST, PLUGINS)
file_format = bbf.get_option_value(OUTPUT, "file_format", ['xml'])
output_file_prefix = bbf.get_option_value(OUTPUT, "output_file_prefix", "datamodel")
output_dir = bbf.get_option_value(OUTPUT, "output_dir", "./out")
@ -115,7 +103,6 @@ if isinstance(file_format, list):
if isinstance(acs, list):
for acs_format in acs:
bbf.clean_supported_dm_list()
output_file_name = output_dir + '/' + output_file_prefix + '_' + acs_format + '.xml'
if acs_format == "hdm":
bbf_xml.generate_xml('HDM', DM_JSON_FILES, output_file_name)
@ -123,12 +110,9 @@ if isinstance(file_format, list):
if acs_format == "default":
bbf_xml.generate_xml('default', DM_JSON_FILES, output_file_name)
if _format == "xls":
bbf.clean_supported_dm_list()
output_file_name = output_dir + '/' + output_file_prefix + '.xls'
bbf_excel.generate_excel(['tr181', 'tr104'], output_file_name)
bbf.remove_file(bbf.DATA_MODEL_FILE)
print("Datamodel generation completed, aritifacts shall be available in out directory or as per input json configuration")
sys.exit(bbf.BBF_ERROR_CODE)

42
tools/generate_dm.sh Executable file
View file

@ -0,0 +1,42 @@
#!/bin/bash
# Set variables
CONTAINER_NAME="generate_dm_tools"
IMAGE_NAME="dev.iopsys.eu:5050/iopsys/gitlab-ci-pipeline/code-analysis:latest"
INPUT=""
root="${PWD/tools}"
usages()
{
echo "Usage: $0 [OPTIONS]..."
echo
echo " -I <docker image>"
echo " -i json input file path relative to top directory"
echo " -h help"
echo
echo
echo "examples:"
echo "~/git/bbfdm$ ./tools/generate_dm.sh -i tools/tools_input.json"
echo
}
runner()
{
# Create and start the Docker container
docker run --rm -it -v"${root}:/bbfdm" -w "/bbfdm" \
--entrypoint=/bin/bash --name "$CONTAINER_NAME" "$IMAGE_NAME" \
-c "./gitlab-ci/generate_supported_dm.sh /bbfdm/${1}"
}
while getopts n:I:i:h opts
do
case "${opts}" in
n) CONTAINER_NAME="${OPTARG}";;
I) IMAGE_NAME="${OPTARG}";;
i) INPUT="${OPTARG}";;
h) usages; exit 0;;
*) usages; exit 1;;
esac
done
runner "${INPUT}"

View file

@ -12,183 +12,160 @@ import argparse
import xlwt
import bbf_common as bbf
LIST_USP_DM = []
LIST_CWMP_DM = []
LIST_DM = []
def getprotocols(value):
if isinstance(value, dict):
for obj, val in value.items():
if obj == "protocols" and isinstance(val, list):
if len(val) == 2:
return "CWMP+USP"
elif val[0] == "usp":
return "USP"
else:
return "CWMP"
return "CWMP+USP"
def is_param_obj_command_event_supported(dmobject):
for value in bbf.LIST_SUPPORTED_DM:
obj = json.loads(value)
param = bbf.get_option_value(obj, "param", None)
if param is None:
continue
def is_dm_supported(supported_dm_list, dmobject):
for entry in supported_dm_list:
param = entry.get("param")
if param == dmobject:
bbf.LIST_SUPPORTED_DM.remove(value)
supported_dm_list.remove(entry)
return "Yes"
return "No"
def add_data_to_list_dm(obj, supported, protocols, types, version):
rootdm = bbf.get_root_node()
if (rootdm):
if (obj.startswith(rootdm)):
LIST_DM.append(obj + "," + protocols + "," + supported + "," + types + "," + version)
else:
LIST_DM.append(obj + "," + protocols + "," + supported + "," + types + "," + version)
def add_data_to_list_dm(dm_list, obj, supported):
dm_list.append(obj + "," + supported)
def parse_standard_object(dmobject, value):
def parse_standard_object(list_read, list_write, dmobject, value, proto):
hasobj = bbf.obj_has_child(value)
hasparam = bbf.obj_has_param(value)
supported = is_param_obj_command_event_supported(dmobject)
version = bbf.get_option_value(value, "version", "2.0")
add_data_to_list_dm(dmobject, supported, getprotocols(value), "object", version)
if bbf.is_proto_exist(value, proto) is True:
supported = is_dm_supported(list_read, dmobject)
add_data_to_list_dm(list_write, dmobject, supported)
if hasparam:
if isinstance(value, dict):
for k, v in value.items():
if k == "mapping":
continue
if isinstance(v, dict):
for k1, v1 in v.items():
if k1 == "type" and v1 != "object":
if bbf.is_proto_exist(v, proto) is False:
continue
supported = is_dm_supported(list_read, dmobject + k)
add_data_to_list_dm(list_write, dmobject + k, supported)
break
if hasobj:
if isinstance(value, dict):
for k, v in value.items():
if isinstance(v, dict):
for k1, v1 in v.items():
if k1 == "type" and v1 == "object":
parse_standard_object(list_read, list_write, k, v, proto)
if hasparam:
if isinstance(value, dict):
for k, v in value.items():
if k == "mapping":
continue
if isinstance(v, dict):
for k1, v1 in v.items():
if k1 == "type" and v1 != "object":
supported = is_param_obj_command_event_supported(dmobject + k)
version = bbf.get_option_value(v, "version", "2.0")
add_data_to_list_dm(dmobject + k, supported, getprotocols(v), "operate" if "()" in k else "event" if "!" in k else "parameter", version)
break
def parse_vendor_object(list_read, list_write):
for entry in list_read:
param = entry.get("param")
add_data_to_list_dm(list_write, param, "Yes")
if hasobj:
if isinstance(value, dict):
for k, v in value.items():
if isinstance(v, dict):
for k1, v1 in v.items():
if k1 == "type" and v1 == "object":
parse_standard_object(k, v)
def parse_dynamic_object(dm_name_list):
if isinstance(dm_name_list, list) is False:
def load_json_data(dm_name):
JSON_FILE = bbf.ARRAY_JSON_FILES.get(dm_name, None)
if JSON_FILE is None:
print(f"!!!! {dm_name} : Data Model doesn't exist")
return None
for value in bbf.LIST_SUPPORTED_DM:
obj = json.loads(value)
param = bbf.get_option_value(obj, "param", None)
p_type = bbf.get_option_value(obj, "type", None)
version = bbf.get_option_value(obj, "version", "2.0")
if param is None or p_type is None:
continue
with open(JSON_FILE, "r", encoding='utf-8') as file:
return json.load(file, object_pairs_hook=OrderedDict)
for dm in dm_name_list:
def parse_object(dm_name_list, list_read, list_write, proto):
for dm in dm_name_list:
data = load_json_data(dm)
if data is not None:
for obj, value in data.items():
if obj is None:
print(f'!!!! {dm} : Wrong JSON Data model format!')
else:
parse_standard_object(list_read, list_write, obj, value, proto)
JSON_FILE = bbf.ARRAY_JSON_FILES.get(dm, None)
if JSON_FILE is None:
continue
if dm == "tr181" and ".Services." in param:
continue
if dm == "tr104" and ".Services." not in param:
continue
if dm == "tr135" and ".Services." not in param:
continue
dmType = "object" if p_type == "DMT_OBJ" else "parameter"
add_data_to_list_dm(param, "Yes", "CWMP+USP", dmType, version)
parse_vendor_object(list_read, list_write)
def parse_object_tree(dm_name_list):
if isinstance(dm_name_list, list) is False:
return None
for dm in dm_name_list:
# Usage for USP Data Model
LIST_SUPPORTED_USP_DM = bbf.LIST_SUPPORTED_USP_DM
parse_object(dm_name_list, LIST_SUPPORTED_USP_DM, LIST_USP_DM, "usp")
# Usage for CWMP Data Model
LIST_SUPPORTED_CWMP_DM = bbf.LIST_SUPPORTED_CWMP_DM[:]
parse_object(dm_name_list, LIST_SUPPORTED_CWMP_DM, LIST_CWMP_DM, "cwmp")
JSON_FILE = bbf.ARRAY_JSON_FILES.get(dm, None)
def generate_excel_sheet(sheet, title, data, style_mapping):
style_title = style_mapping["title"]
style_default = style_mapping["default"]
style_suffix = style_mapping["suffix"]
if JSON_FILE is not None:
file = open(JSON_FILE, "r", encoding='utf-8')
data = json.loads(file.read(), object_pairs_hook=OrderedDict)
sheet.write(0, 0, title, style_title)
sheet.write(0, 1, 'Supported', style_title)
for obj, value in data.items():
if obj is None:
print(f'!!!! {dm} : Wrong JSON Data model format!')
continue
for i, value in enumerate(data):
param = value.split(",")
suffix = None
parse_standard_object(obj, value)
for suffix_candidate, suffix_style in style_suffix.items():
if param[0].endswith(suffix_candidate):
suffix = suffix_style
break
style_name, style = suffix or (None, style_default)
if style_name is not None:
sheet.write(i + 1, 0, param[0], style_name)
else:
print(f"!!!! {dm} : Data Model doesn't exist")
sheet.write(i + 1, 0, param[0])
parse_dynamic_object(dm_name_list)
sheet.write(i + 1, 1, param[1], style)
sheet.col(0).width = 1300 * 20
sheet.col(1).width = 175 * 20
def generate_excel_file(output_file):
bbf.remove_file(output_file)
LIST_DM.sort(reverse=False)
LIST_USP_DM.sort(reverse=False)
LIST_CWMP_DM.sort(reverse=False)
wb = xlwt.Workbook(style_compression=2)
sheet = wb.add_sheet('CWMP-USP')
xlwt.add_palette_colour("custom_colour_yellow", 0x10)
xlwt.add_palette_colour("custom_colour_green", 0x20)
xlwt.add_palette_colour("custom_colour_grey", 0x30)
wb.set_colour_RGB(0x10, 255, 255, 153)
wb.set_colour_RGB(0x20, 102, 205, 170)
wb.set_colour_RGB(0x30, 153, 153, 153)
style_title = xlwt.easyxf(
'pattern: pattern solid, fore_colour custom_colour_grey;' + 'font: bold 1, color black;' + 'alignment: horizontal center;')
sheet.write(0, 0, 'OBJ/PARAM/OPERATE', style_title)
sheet.write(0, 1, 'Protocols', style_title)
sheet.write(0, 2, 'Version', style_title)
sheet.write(0, 3, 'Supported', style_title)
style_mapping = {
"title": xlwt.easyxf('pattern: pattern solid, fore_colour custom_colour_grey;' +
'font: bold 1, color black;' + 'alignment: horizontal center;'),
"default": xlwt.easyxf('alignment: horizontal center;'),
"suffix": {
".": (xlwt.easyxf('pattern: pattern solid, fore_colour custom_colour_yellow'),
xlwt.easyxf('pattern: pattern solid, fore_colour custom_colour_yellow;' +
'alignment: horizontal center;')),
"()" : (xlwt.easyxf('pattern: pattern solid, fore_colour custom_colour_green'),
xlwt.easyxf('pattern: pattern solid, fore_colour custom_colour_green;' +
'alignment: horizontal center;')),
"!" : (xlwt.easyxf('pattern: pattern solid, fore_colour custom_colour_green'),
xlwt.easyxf('pattern: pattern solid, fore_colour custom_colour_green;' +
'alignment: horizontal center;')),
}
}
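# Suffix convention used by generate_excel_sheet(): names ending in "." are
# objects (yellow), "()" are operate commands and "!" are events (green);
# anything else is rendered as a plain parameter.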
usp_sheet = wb.add_sheet('USP')
generate_excel_sheet(usp_sheet, 'OBJ/PARAM/OPERATE/EVENT', LIST_USP_DM, style_mapping)
i = 0
for value in LIST_DM:
param = value.split(",")
i += 1
if param[3] == "object":
style_name = xlwt.easyxf(
'pattern: pattern solid, fore_colour custom_colour_yellow')
style = xlwt.easyxf(
'pattern: pattern solid, fore_colour custom_colour_yellow;' + 'alignment: horizontal center;')
elif param[3] == "operate" or param[3] == "event":
style_name = xlwt.easyxf(
'pattern: pattern solid, fore_colour custom_colour_green')
style = xlwt.easyxf(
'pattern: pattern solid, fore_colour custom_colour_green;' + 'alignment: horizontal center;')
else:
style_name = None
style = xlwt.easyxf('alignment: horizontal center;')
if style_name is not None:
sheet.write(i, 0, param[0], style_name)
else:
sheet.write(i, 0, param[0])
sheet.write(i, 1, param[1], style)
sheet.write(i, 2, param[4], style)
sheet.write(i, 3, param[2], style)
sheet.col(0).width = 1300*20
sheet.col(1).width = 175*20
sheet.col(2).width = 175*20
sheet.col(3).width = 175*20
cwmp_sheet = wb.add_sheet('CWMP')
generate_excel_sheet(cwmp_sheet, 'OBJ/PARAM', LIST_CWMP_DM, style_mapping)
wb.save(output_file)
@ -196,7 +173,6 @@ def generate_excel_file(output_file):
def generate_excel(dm_name_list, output_file="datamodel.xml"):
print("Generating BBF Data Models in Excel format...")
bbf.fill_list_supported_dm()
parse_object_tree(dm_name_list)
generate_excel_file(output_file)
@ -265,7 +241,6 @@ if __name__ == '__main__':
plugins.append(r)
bbf.generate_supported_dm(args.vendor_prefix, args.vendor_list, plugins)
bbf.clean_supported_dm_list()
generate_excel(args.datamodel, args.output)
print(f'Datamodel generation completed, artifacts available in {args.output}')
sys.exit(bbf.BBF_ERROR_CODE)

View file

@ -20,16 +20,17 @@ PRODUCT_CLASS = "DG400PRIME"
MODEL_NAME = "DG400PRIME-A"
SOFTWARE_VERSION = "1.2.3.4"
ARRAY_TYPES = {"DMT_STRING": "string",
"DMT_UNINT": "unsignedInt",
"DMT_UNLONG": "unsignedLong",
"DMT_INT": "int",
"DMT_LONG": "long",
"DMT_BOOL": "boolean",
"DMT_TIME": "dateTime",
"DMT_HEXBIN": "hexBinary",
"DMT_BASE64": "base64"}
ARRAY_TYPES = [ "string",
"unsignedInt",
"unsignedLong",
"int",
"long",
"boolean",
"dateTime",
"hexBinary",
"base64"]
LIST_SUPPORTED_DM = []
def pretty_format(elem):
elem_string = ET.tostring(elem, 'UTF-8')
@ -37,6 +38,103 @@ def pretty_format(elem):
return reparsed.toprettyxml(indent=" ")
def organize_parent_child(dm_list):
organized_dm = []
for parent_item in dm_list:
parent_type = parent_item.get("type")
if parent_type != "object":
continue
parent_name = parent_item.get("param")
organized_dm.append(parent_item)
for child_item in dm_list:
child_type = child_item.get("type")
if child_type is None or child_type == "object":
continue
child_name = child_item.get("param")
if child_name.find(parent_name) != -1:
parent_dot_count = parent_name.count('.')
child_dot_count = child_name.count('.')
if parent_dot_count == child_dot_count:
organized_dm.append(child_item)
return organized_dm
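# Worked illustration (hypothetical entries, not part of the module): each
# object is emitted first, immediately followed by its same-depth parameters.
flat = [
    {"param": "Device.", "type": "object"},
    {"param": "Device.Time.", "type": "object"},
    {"param": "Device.Time.Enable", "type": "parameter"},
    {"param": "Device.RootDataModelVersion", "type": "parameter"},
]
organize_parent_child(flat)
# -> Device., Device.RootDataModelVersion,
#    Device.Time., Device.Time.Enable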
def get_info_from_json(data, dm_json_files=None):
entry = {}
list_data = []
arr = data.split(".")
if len(arr) == 0:
return None
for i in range(0, len(arr)):
string = ""
if i == 0:
string = arr[i] + "."
elif i == (len(arr) - 1):
string = arr[i]
else:
for j in range(0, i + 1):
string = string + arr[j]
string = string + "."
if len(string) != 0:
list_data.append(string)
if len(list_data) == 0:
return entry
found = False
if dm_json_files is not None and isinstance(dm_json_files, list) and dm_json_files:
for fl in dm_json_files:
if os.path.exists(fl):
with open(fl, 'r', encoding='utf-8') as fo:
try:
ob = json.load(fo)
except json.decoder.JSONDecodeError:
continue
index = -1
for key in ob.keys():
if key in list_data:
index = list_data.index(key)
break
if index == -1:
continue
for i in range(index, len(list_data)):
if i != (len(list_data) - 1) and list_data[i + 1] == list_data[i] + "{i}.":
continue
try:
if str(list_data[i]).find("X_IOPSYS_EU_") != -1:
param = str(list_data[i]).replace("X_IOPSYS_EU_", "{BBF_VENDOR_PREFIX}")
else:
param = str(list_data[i])
ob = ob[param]
found = True
except KeyError:
found = False
break
if found is True:
entry["description"] = ob['description'] if "description" in ob else None
entry["enumerations"] = ob['enumerations'] if "enumerations" in ob else None
entry["range"] = ob['range'] if "range" in ob else None
entry["list"] = ob["list"] if "list" in ob else None
break
return entry
def generate_bbf_xml_file(output_file, dm_json_files=None):
global DM_OBJ_COUNT
global DM_PARAM_COUNT
@ -51,32 +149,20 @@ def generate_bbf_xml_file(output_file, dm_json_files=None):
root.set("file", "tr-181-2-16-0-cwmp-full.xml")
model = ET.SubElement(root, "model")
model.set("name", "Device:2.14")
model.set("name", "Device:2.16")
for value in bbf.LIST_SUPPORTED_DM:
for entry in LIST_SUPPORTED_DM:
name = entry.get("param")
p_type = entry.get("type")
access = entry.get("permission")
info = get_info_from_json(name, dm_json_files)
desc = info.get("description")
list_ob = info.get("list")
enum = info.get("enumerations")
rang = info.get("range")
obj = json.loads(value)
protocol = bbf.get_option_value(obj, "protocol", None)
if protocol is None or protocol == "BBFDM_USP":
continue
p_type = bbf.get_option_value(obj, "type", None)
if p_type is None:
continue
name = bbf.get_option_value(obj, "param", None)
permission = bbf.get_option_value(obj, "permission", None)
list_ob = bbf.get_option_value(obj, "list", None)
enum = bbf.get_option_value(obj, "enum", None)
desc = bbf.get_option_value(obj, "description", None)
rang = bbf.get_option_value(obj, "range", None)
if name is None or permission is None:
continue
access = "readOnly" if permission == "DMREAD" else "readWrite"
if p_type == "DMT_OBJ":
if p_type == "object":
# Object
objec = ET.SubElement(model, "object")
objec.set("name", name)
@ -85,8 +171,6 @@ def generate_bbf_xml_file(output_file, dm_json_files=None):
objec.set("maxEntries", "20")
ob_description = ET.SubElement(objec, "description")
if desc is None or len(desc) == 0:
desc = bbf.get_param_info_from_json(name, dm_json_files, "description")
ob_description.text = desc.replace("<", "{").replace(">", "}") if desc is not None else ""
DM_OBJ_COUNT += 1
@ -97,39 +181,21 @@ def generate_bbf_xml_file(output_file, dm_json_files=None):
parameter = ET.SubElement(objec, "parameter")
parameter.set("name", name[name.rindex('.')+1:])
parameter.set("access", access)
p_description = ET.SubElement(parameter, "description")
if desc is None or len(desc) == 0:
desc = bbf.get_param_info_from_json(name, dm_json_files, "description")
p_description.text = desc.replace("<", "{").replace(">", "}") if desc is not None else ""
syntax = ET.SubElement(parameter, "syntax")
if list_ob is None:
list_ob = bbf.get_param_info_from_json(name, dm_json_files, "list")
p_description = ET.SubElement(parameter, "description")
p_description.text = desc.replace("<", "{").replace(">", "}") if desc is not None else ""
syntax = ET.SubElement(parameter, "syntax")
if list_ob is not None and len(list_ob) != 0:
listtag = ET.SubElement(syntax, "list")
item_ob = None
maxsize = None
# Handle items in list
try:
item_ob = list_ob["item"]
except KeyError:
item_ob = None
item_ob = list_ob["item"] if "item" in list_ob else None
if item_ob is not None:
minval = None
maxval = None
try:
minval = item_ob["min"]
except KeyError:
minval = None
try:
maxval = item_ob["max"]
except KeyError:
maxval = None
minval = item_ob["min"] if "min" in item_ob else None
maxval = item_ob["max"] if "max" in item_ob else None
if minval is not None:
listtag.set("minItems", str(minval))
@ -138,35 +204,23 @@ def generate_bbf_xml_file(output_file, dm_json_files=None):
listtag.set("maxItems", str(maxval))
# Handle maxsize in list
try:
maxsize = list_ob["maxsize"]
except KeyError:
maxsize = None
maxsize = list_ob["maxsize"] if "maxsize" in list_ob else None
if maxsize is not None:
sizetag = ET.SubElement(listtag, "size")
sizetag.set("maxLength", str(maxsize))
if enum is None or len(enum) == 0:
try:
enum = list_ob["enumerations"]
except KeyError:
enum = None
enum = list_ob["enumerations"] if "enumerations" in list_ob else None
try:
list_datatype = list_ob["datatype"]
except KeyError:
list_datatype = None
list_datatype = list_ob["datatype"] if "datatype" in list_ob else None
if list_datatype is not None and list_datatype in ARRAY_TYPES.values():
if list_datatype is not None and list_datatype in ARRAY_TYPES:
subtype = ET.SubElement(syntax, list_datatype)
else:
subtype = ET.SubElement(syntax, ARRAY_TYPES.get(p_type, None))
subtype = ET.SubElement(syntax, p_type)
else:
subtype = ET.SubElement(syntax, ARRAY_TYPES.get(p_type, None))
if enum is None:
enum = bbf.get_param_info_from_json(name, dm_json_files, "enumerations")
subtype = ET.SubElement(syntax, p_type)
if enum is not None:
for val in enum:
@ -174,36 +228,15 @@ def generate_bbf_xml_file(output_file, dm_json_files=None):
enumeration.set("value", str(val))
# handle range
range_min = None
range_max = None
if rang is None:
try:
if list_ob is not None:
rang = list_ob["range"]
except KeyError:
rang = None
if rang is None:
rang = bbf.get_param_info_from_json(name, dm_json_files, "range")
if rang is None and list_ob is not None:
rang = list_ob["range"] if "range" in list_ob else None
if rang is not None and len(rang) != 0:
rang_len = len(rang)
for i in range(rang_len):
try:
range_min = rang[i]["min"]
except KeyError:
range_min = None
for i in range(len(rang)):
range_min = rang[i]["min"] if "min" in rang[i] else None
range_max = rang[i]["max"] if "max" in rang[i] else None
val_type = list_datatype if list_datatype is not None else p_type
try:
range_max = rang[i]["max"]
except KeyError:
range_max = None
if list_datatype is not None:
val_type = list_datatype
else:
val_type = ARRAY_TYPES.get(p_type, None)
if val_type == "string" or val_type == "hexBinary" or val_type == "base64":
size_tag = ET.SubElement(subtype, "size")
if range_min is not None:
@ -283,49 +316,34 @@ def generate_hdm_xml_file(output_file):
attributeLength = ET.SubElement(attribute_visibility, "attributeLength")
attributeLength.text = str("64")
#param_array = np.empty(15, dtype=ET.Element)
param_array = [ET.Element] * 15
param_array[0] = parameters
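# Depth-indexed containers: param_array[d] holds the current <parameters>
# element for objects at dot-depth d (after stripping ".{i}"), so an object
# attaches under param_array[d - 1] and registers its own container at
# param_array[d]; parameters then nest under the right ancestor.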
root_dot_count = bbf.get_root_node().count('.') - 1 if (bbf.get_root_node()) else 0
root_dot_count = 0
for value in bbf.LIST_SUPPORTED_DM:
for entry in LIST_SUPPORTED_DM:
obj = json.loads(value)
protocol = bbf.get_option_value(obj, "protocol", None)
if protocol is None or protocol == "BBFDM_USP":
continue
name = entry.get("param")
p_type = entry.get("type")
p_type = bbf.get_option_value(obj, "type", None)
if p_type is None:
continue
name = bbf.get_option_value(obj, "param", None)
permission = bbf.get_option_value(obj, "permission", None)
if name is None or permission is None:
continue
if p_type == "DMT_OBJ":
if p_type == "object":
# Object
obj_tag = ET.SubElement(
param_array[name.replace(".{i}", "").count('.') - root_dot_count -1], "parameter")
obj_tag = ET.SubElement(param_array[name.replace(".{i}", "").count('.') - root_dot_count -1], "parameter")
obj_name = ET.SubElement(obj_tag, "parameterName")
obj_name.text = str(name.replace(".{i}", "").split('.')[-2])
obj_type = ET.SubElement(obj_tag, "parameterType")
obj_type.text = str("object")
obj_array = ET.SubElement(obj_tag, "array")
obj_array.text = str(
"true" if name.endswith(".{i}.") else "false")
obj_array.text = str("true" if name.endswith(".{i}.") else "false")
parameters = ET.SubElement(obj_tag, "parameters")
param_array[name.replace(".{i}", "").count('.') - root_dot_count] = parameters
DM_OBJ_COUNT += 1
else:
# Parameter
param_tag = ET.SubElement(
param_array[name.replace(".{i}", "").count('.') - root_dot_count], "parameter")
param_tag = ET.SubElement(param_array[name.replace(".{i}", "").count('.') - root_dot_count], "parameter")
param_name = ET.SubElement(param_tag, "parameterName")
param_name.text = str(name[name.rindex('.')+1:])
param_type = ET.SubElement(param_tag, "parameterType")
param_type.text = str(ARRAY_TYPES.get(p_type, None))
param_type.text = str(p_type)
DM_PARAM_COUNT += 1
xml_file = open(output_file, "w", encoding='utf-8')
@ -333,14 +351,16 @@ def generate_hdm_xml_file(output_file):
xml_file.close()
def generate_xml(acs = 'default', dm_json_files=None, output_file="datamodel.xml"):
global LIST_SUPPORTED_DM
global DM_OBJ_COUNT
global DM_PARAM_COUNT
DM_OBJ_COUNT = 0
DM_PARAM_COUNT = 0
LIST_SUPPORTED_DM = organize_parent_child(bbf.LIST_SUPPORTED_CWMP_DM)
print(f'Generating BBF Data Models in XML format for {acs} ACS...')
bbf.fill_list_supported_dm()
if acs == "HDM":
generate_hdm_xml_file(output_file)
@ -463,7 +483,6 @@ if __name__ == '__main__':
plugins.append(r)
bbf.generate_supported_dm(args.vendor_prefix, args.vendor_list, plugins)
bbf.clean_supported_dm_list()
generate_xml(args.format, args.dm_json_files, args.output)
print(f'Datamodel generation completed, artifacts available in {args.output}')
sys.exit(bbf.BBF_ERROR_CODE)

View file

@ -5,30 +5,37 @@
"product_class": "DG400PRIME",
"model_name": "DG400PRIME-A",
"software_version": "1.2.3.4",
"root_node": "Device.",
"vendor_list": [
"iopsys"
],
"dm_json_files": [
"../libbbfdm/dmtree/json/tr181.json",
"../libbbfdm/dmtree/vendor/iopsys/vendor.json"
"libbbfdm/dmtree/json/tr181.json",
"libbbfdm/dmtree/json/tr104.json",
"libbbfdm/dmtree/vendor/iopsys/vendor.json"
],
"vendor_prefix": "X_IOPSYS_EU_",
"plugins": [
{
"repo": "https://dev.iopsys.eu/bbf/icwmp.git",
"version": "devel",
"dm_files": [
"src/cwmp_dm/datamodel.c"
]
},
{
"repo": "https://dev.iopsys.eu/bbf/bulkdata.git",
"proto": "git",
"dm_files": [
"bbf_plugin/bulkdata.json"
]
},
{
"repo": "https://dev.iopsys.eu/bbf/xmppc.git",
"proto": "git"
"dm_files": [
"src/datamodel.c"
]
},
{
"repo": "https://dev.iopsys.eu/bbf/stunc.git",
"proto": "git",
"version": "devel",
"dm_files": [
"src/datamodel.c"
@ -36,7 +43,6 @@
},
{
"repo": "https://dev.iopsys.eu/bbf/udpecho.git",
"proto": "git",
"version": "devel",
"dm_files": [
"src/datamodel.c"
@ -44,7 +50,6 @@
},
{
"repo": "https://dev.iopsys.eu/bbf/twamp-light.git",
"proto": "git",
"version": "devel",
"dm_files": [
"src/datamodel.c"
@ -52,7 +57,6 @@
},
{
"repo": "https://dev.iopsys.eu/bbf/periodicstats.git",
"proto": "git",
"version": "devel",
"dm_files": [
"bbf_plugin/bbf_plugin.c"
@ -60,7 +64,6 @@
},
{
"repo": "https://dev.iopsys.eu/feed/iopsys.git",
"proto": "git",
"version": "devel",
"dm_files": [
"urlfilter/files/etc/bbfdm/json/urlfilter.json",
@ -71,7 +74,6 @@
},
{
"repo": "https://dev.iopsys.eu/lcm/swmodd.git",
"proto": "git",
"version": "devel",
"dm_files": [
"src/datamodel.c"
@ -79,23 +81,16 @@
},
{
"repo": "https://dev.iopsys.eu/bbf/usermngr.git",
"proto": "git",
"version": "devel",
"extra_dependencies": [
"-lcrypt"
],
"dm_files": [
"src/users.c"
]
},
{
"repo": "https://dev.iopsys.eu/bbf/icwmp.git",
"proto": "git",
"version": "devel",
"dm_files": [
"src/cwmp_dm/datamodel.c"
]
},
{
"repo": "https://dev.iopsys.eu/iopsys/hostmngr.git",
"proto": "git",
"version": "devel",
"dm_files": [
"src/bbf_plugin/hosts.c"
@ -103,10 +98,37 @@
},
{
"repo": "https://dev.iopsys.eu/bbf/timemngr.git",
"proto": "git",
"version": "devel",
"dm_files": [
"src/times.c"
],
"micro-service": {
"name": "bbfdm.time",
"parent_dm": "Device.",
"object": "Time",
"root_obj": "bbfdm"
}
},
{
"repo": "https://dev.iopsys.eu/voice/tr104.git",
"version": "devel",
"dm_files": [
"libdm/tr104/*.c"
],
"extra_dependencies": [
"-I libdmtree",
"-I libdm/common"
]
},
{
"repo": "https://dev.iopsys.eu/voice/tr104.git",
"version": "devel",
"dm_files": [
"libdm/extensions/iowrt/*.c"
],
"extra_dependencies": [
"-I libdmtree",
"-I libdm/common"
]
}
],