/scripts/pylib/power-twister-harness/utils/

UtilityFunctions.py
     27  def calculate_rms(data):
     35  data_array = np.array(data, dtype=np.float64)  # Convert to float64 to avoid type issues
     42  def bytes_to_twobyte_values(data):
     43  value = int.from_bytes(data[0], 'big') << 8 | int.from_bytes(data[1], 'big')
     72  def current_RMS(data, trim=100, num_peaks=1, peak_height=0.008, peak_distance=40, padding=40):
    104  data = data[trim:]
    107  data = [float(x) for x in data]
    110  peaks = signal.find_peaks(data, distance=peak_distance, height=peak_height)[0]
    122  indices = np.concatenate(([0], np.array(peaks), [len(data)]))
    130  split_data.append(data[start_idx:end_idx])
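Judging from the matched lines, UtilityFunctions.py computes an RMS value over a current trace and splits the trace at peaks detected with scipy. A minimal sketch of that pattern, with names and defaults mirroring the hits above rather than the harness's exact implementation:

```python
import numpy as np
from scipy import signal

def calculate_rms(data):
    """RMS of a sample sequence; float64 avoids integer overflow and rounding."""
    data_array = np.array(data, dtype=np.float64)
    return float(np.sqrt(np.mean(data_array ** 2)))

def split_at_peaks(data, peak_height=0.008, peak_distance=40):
    """Split a current trace into segments delimited by detected peaks."""
    data = [float(x) for x in data]
    peaks = signal.find_peaks(data, distance=peak_distance, height=peak_height)[0]
    indices = np.concatenate(([0], np.array(peaks), [len(data)]))
    return [data[int(a):int(b)] for a, b in zip(indices[:-1], indices[1:])]
```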
|
/scripts/pylib/twister/twisterlib/

platform.py
     26  def __init__(self, data: dict[str, str]):
     27  assert "name" in data
     28  assert data["name"] in SUPPORTED_SIMS
     29  self.name = data["name"]
     30  self.exec = data.get("exec")
    142  Simulator(data) for data in variant_data.get(
    220  data = None
    221  dir2data[board_dir] = data
    255  if data is None:
    279  data = dir2data[board.dir]
    [all …]
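The platform.py hits show a Simulator object built from a dict taken out of board metadata. A hedged sketch of that constructor (the SUPPORTED_SIMS contents here are illustrative, not the real list):

```python
SUPPORTED_SIMS = ["qemu", "renode", "simics"]  # illustrative subset

class Simulator:
    """One simulator entry from a board's metadata (sketch)."""

    def __init__(self, data: dict[str, str]):
        assert "name" in data, "simulator entry needs a name"
        assert data["name"] in SUPPORTED_SIMS, f"unknown simulator: {data['name']}"
        self.name = data["name"]
        self.exec = data.get("exec")  # optional executable override

sims = [Simulator(d) for d in [{"name": "qemu"}, {"name": "renode", "exec": "renode"}]]
```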
|
config_parser.py
    100  self.data: dict[str, Any] = {}
    105  data = scl.yaml_load_verify(self.filename, self.schema)
    106  self.data = data
    108  if 'tests' in self.data:
    109  self.scenarios = self.data['tests']
    110  if 'common' in self.data:
    111  self.common = self.data['common']
    112  return data
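config_parser.py loads a test configuration YAML through scl.yaml_load_verify (schema-validated) and splits it into per-scenario and common sections. A plain-yaml sketch of the same flow, without the schema step:

```python
from typing import Any
import yaml

def load_test_config(filename: str) -> dict[str, Any]:
    """Load a twister test YAML and split scenarios from common settings."""
    with open(filename) as f:
        data: dict[str, Any] = yaml.safe_load(f)
    scenarios = data.get('tests', {})
    common = data.get('common', {})
    return {"scenarios": scenarios, "common": common}
```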
|
/scripts/footprint/

upload_data.py
     31  for d in data.keys():
     43  "value": data[d]
     72  data = {}
     81  data['all'] = node.size
     83  data[node.name] = node.size
     92  data['all'] = node.size
     94  data[comp] = node.size
     96  return data
    121  data = parse_file(file)
    139  if args.data and args.zephyr_base:
    [all …]
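upload_data.py apparently flattens a {component: size} footprint dict into one record per component before uploading. A small sketch of that flattening; the field names are assumptions, not the script's exact schema:

```python
def to_records(data: dict[str, int], platform: str, version: str) -> list[dict]:
    """One upload record per footprint component."""
    return [
        {"platform": platform, "version": version, "component": d, "value": data[d]}
        for d in data.keys()
    ]

records = to_records({"all": 40960, "kernel": 12288}, "frdm_k64f", "v4.0.0")
```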
|
compare_footprint
    161  data = []
    165  data.append(row)
    166  return data
    232  for type, data in {'base': base_results, 'current': current_results}.items():
    234  for row in data:
    278  for platform, data in deltas[test].items():
    280  for metric, value in data.items():
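compare_footprint walks base and current result sets and reports per-metric deltas per platform. The core comparison reduces to something like this sketch:

```python
def footprint_deltas(base: dict[str, int], current: dict[str, int]) -> dict[str, int]:
    """Delta of each metric present in both result sets (positive means growth)."""
    return {metric: current[metric] - base[metric] for metric in current if metric in base}

print(footprint_deltas({"rom": 40000, "ram": 12000}, {"rom": 40512, "ram": 11800}))
# {'rom': 512, 'ram': -200}
```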
|
/scripts/ci/

version_mgr.py
     46  data = None
     52  data = json.load(fp)
     53  return data
     67  data = get_versions()
     68  for item in data:
     89  data = get_versions()
     90  latest = data[-1]
    113  data = get_versions()
    128  if data and not published:
    134  data.append(item)
    [all …]
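version_mgr.py keeps a JSON list of versions, reads it back with json.load, and appends new entries. A sketch of that read/append cycle; the file name and item fields are assumptions:

```python
import json
from pathlib import Path

VERSIONS_FILE = Path("versions.json")  # placeholder path

def get_versions() -> list:
    if not VERSIONS_FILE.exists():
        return []
    with VERSIONS_FILE.open() as fp:
        return json.load(fp)

def add_version(version: str) -> None:
    data = get_versions()
    if all(item.get("version") != version for item in data):
        data.append({"version": version, "published": False})
        VERSIONS_FILE.write_text(json.dumps(data, indent=2))

add_version("zephyr-v4.0.0")
print(get_versions()[-1])
```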
|
/scripts/tests/twister/

test_twister.py
     38  data = TwisterConfigParser(filename, schema)
     39  data.load()
     40  assert data
     58  data = TwisterConfigParser(filename, schema)
     59  data.load()
     63  scenario = data.get_scenario("test_config.main")
|
/scripts/ci/es_upload/

README.md
      3  This directory contains [ElasticSearch data store](https://github.com/elastic/elasticsearch)
     10  to proper data types, eventually to store the expected document structure.
     12  and the corresponding data scheme in the same source code repository.
     22  Tune resulting data scheme and size depending on your particular needs.
     25  to check the resulting data without its actual upload.
     45  exclude excess data, extract substrings by regular expressions, change data structure
     89  Store test results with `recording` data entries, for example from
    100  Upload data with 'flattened' test suites creating documents for each `recording` data entry.
    110  ### Twister test with recording and extracting more data
    124  Upload data with 'flattened' test suites creating documents for each `record` data entry
    [all …]
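The README hits describe uploading twister.json results, optionally "flattened" so each recording becomes its own document. A hedged sketch of a bulk upload with the elasticsearch Python client; the index name, host, and the twister.json layout used here are assumptions:

```python
import json
from elasticsearch import Elasticsearch, helpers

es = Elasticsearch("http://localhost:9200")  # placeholder host

with open("twister.json") as f:
    report = json.load(f)

# One document per test suite; a 'flattened' upload would instead emit one
# document per `recording` entry carried by a suite.
actions = (
    {"_index": "zephyr-test-results", "_source": suite}
    for suite in report.get("testsuites", [])
)
helpers.bulk(es, actions)
```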
|
/scripts/pylib/build_helpers/

domains.py
     63  data = yaml.safe_load(domains_yaml)
     64  pykwalify.core.Core(source_data=data,
     70  self._build_dir = data['build_dir']
     73  for d in data['domains']
     80  self._default_domain = self.get_domain(data['default'])
     81  self._flash_order = self.get_domains(data.get('flash_order', []))
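domains.py parses the domains.yaml emitted by a sysbuild build and validates it with pykwalify before pulling out the build directory, domain list, and default domain. A reduced sketch under those assumptions, with the schema step left out:

```python
import yaml

def load_domains(domains_yaml: str):
    """Parse a sysbuild domains.yaml (the real helper also schema-validates it with pykwalify)."""
    data = yaml.safe_load(domains_yaml)
    names = [d['name'] for d in data['domains']]
    return data['build_dir'], names, data['default']

build_dir, names, default = load_domains(
    "build_dir: /tmp/build\ndefault: app\ndomains:\n- name: app\n- name: mcuboot\n"
)
print(build_dir, names, default)
```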
|
/scripts/logging/dictionary/dictionary_parser/

utils.py
     30  data = section['data']
     39  while (offset < max_offset) and (data[offset] != 0):
     40  ret_str += chr(data[offset])
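utils.py scans a section's byte payload for a NUL-terminated string. The loop in the hits amounts to:

```python
def extract_string(data: bytes, offset: int, max_offset: int) -> str:
    """Collect characters until a NUL byte or the end of the region."""
    ret_str = ''
    while (offset < max_offset) and (data[offset] != 0):
        ret_str += chr(data[offset])
        offset += 1
    return ret_str

print(extract_string(b'hello\x00world', 0, 11))  # -> 'hello'
```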
|
/scripts/logging/dictionary/

live_log_parser.py
    122  data = b''
    135  data += reader.read_non_blocking()
    136  parsed_data_offset = parserlib.parser(data, log_parser, logger)
    137  data = data[parsed_data_offset:]
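live_log_parser.py accumulates bytes from a non-blocking reader, hands the whole buffer to the parser, and keeps only the unparsed tail. A self-contained sketch of that consume-and-retain loop; the toy line parser here stands in for parserlib.parser:

```python
def consume(buffer: bytes, parse) -> bytes:
    """Hand the buffer to a parser that returns how many bytes it consumed; keep the tail."""
    parsed_offset = parse(buffer)
    return buffer[parsed_offset:]

def parse_lines(buffer: bytes) -> int:
    # Toy parser: consume only complete lines, leave a partial line for the next read.
    return buffer.rfind(b'\n') + 1 if b'\n' in buffer else 0

data = b''
for chunk in (b'log: boot ok\nlog: idle', b' entered\n'):
    data += chunk                  # bytes read since the last parse
    data = consume(data, parse_lines)
print(data)                        # b'' once every complete line has been consumed
```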
|
/scripts/coredump/coredump_parser/

log_parser.py
     96  data = self.fd.read(num_bytes)
     98  self.threads_metadata = {"hdr_ver" : hdr_ver, "data" : data}
    119  data = self.fd.read(struct.calcsize(ptr_fmt))
    120  saddr, eaddr = struct.unpack(ptr_fmt, data)
    124  data = self.fd.read(size)
    126  mem = {"start": saddr, "end": eaddr, "data": data}
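log_parser.py reads a pair of pointers for each memory block header and then the raw bytes between those addresses. A sketch of that struct.unpack pattern; the 64-bit little-endian pointer format is an assumption:

```python
import io
import struct

def read_memory_block(fd, ptr_fmt="<QQ"):
    """Read a {start, end, data} memory block as the coredump parser does."""
    hdr = fd.read(struct.calcsize(ptr_fmt))
    saddr, eaddr = struct.unpack(ptr_fmt, hdr)
    data = fd.read(eaddr - saddr)
    return {"start": saddr, "end": eaddr, "data": data}

blob = struct.pack("<QQ", 0x20000000, 0x20000004) + b"\xde\xad\xbe\xef"
print(read_memory_block(io.BytesIO(blob)))
```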
|
/scripts/pylib/twister/

scl.py
     61  def _yaml_validate(data, schema):
     64  c = pykwalify.core.Core(source_data=data, schema_data=schema)
     69  def _yaml_validate(data, schema):
|
/scripts/pylib/power-twister-harness/stm32l562e_dk/

PowerShield.py
    320  data = []
    321  data.append(first_byte)
    325  data.append(second_byte)
    327  UtilityFunctions.bytes_to_twobyte_values(data)
    431  data = self.dataQueue.get()
    432  writer.writerow(data)
    464  self.power_shield_data.data.append(row[0])
    467  self.power_shield_data.data
    473  for data in self.power_shield_data.data:
    475  float(data) * float(_delta_time) * float(self.target_voltage)
    [all …]
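PowerShield.py pairs two received bytes into one 16-bit current sample and later integrates the samples into energy as current × Δt × voltage. A sketch of both steps; units and scaling are assumptions, not the shield's calibration:

```python
def twobyte_value(first_byte: int, second_byte: int) -> int:
    """Combine two 8-bit reads into one big-endian 16-bit sample."""
    return (first_byte << 8) | second_byte

def energy_joules(current_samples_a, delta_time_s: float, voltage_v: float) -> float:
    """Accumulate E = sum(I * dt * V) over the captured current trace."""
    return sum(float(i) * delta_time_s * voltage_v for i in current_samples_a)

print(twobyte_value(0x01, 0x2C))                         # 300
print(energy_joules([0.010, 0.012, 0.011], 0.001, 3.3))  # ~0.000109 J
```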
|
/scripts/pylib/pytest-twister-harness/src/twister_harness/device/

device_adapter.py
    144  data = self._read_from_queue(timeout)
    150  logger.debug('#: %s', data)
    151  return data
    214  def write(self, data: bytes) -> None:
    220  self._write_to_device(data)
    256  data: str | object = self._device_read_queue.get(timeout=timeout)
    259  return data
    320  def _write_to_device(self, data: bytes) -> None:
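device_adapter.py serves reads from a per-device queue with a caller-supplied timeout. The queue-backed readline suggested by the hits looks roughly like:

```python
import queue

def read_line(read_queue: queue.Queue, timeout: float) -> str:
    """Block up to `timeout` seconds for the next line captured from the device."""
    try:
        data = read_queue.get(timeout=timeout)
    except queue.Empty:
        raise TimeoutError(f'no output from device within {timeout}s')
    return data

q = queue.Queue()
q.put('*** Booting Zephyr OS ***')
print(read_line(q, timeout=1.0))
```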
|
/scripts/dts/python-devicetree/tests/

test_edtlib.py
     77  …edtlib.ControllerAndData(node=node, controller=controller_0, data={'one': 1}, name=None, basename=…
     78  …edtlib.ControllerAndData(node=node, controller=controller_1, data={'one': 2, 'two': 3}, name=None,…
     88  …edtlib.ControllerAndData(node=node, controller=controller_0, data={'one': 0}, name=None, basename=…
     89  …edtlib.ControllerAndData(node=node, controller=controller_1, data={'one': 0, 'two': 1}, name=None,…
     95  …edtlib.ControllerAndData(node=node, controller=controller_0, data={'one': 3}, name=None, basename=…
    102  …edtlib.ControllerAndData(node=node, controller=controller_0, data={'one': 0}, name=None, basename=…
    109  …edtlib.ControllerAndData(node=node, controller=controller_0, data={'one': 0}, name=None, basename=…
    116  …edtlib.ControllerAndData(node=node, controller=controller_0, data={'one': 3}, name=None, basename=…
    123  …edtlib.ControllerAndData(node=node, controller=controller_0, data={'one': 3}, name=None, basename=…
   1017  controller, data = expected
    [all …]
|
/scripts/pylib/display-twister-harness/

test_display.py
     24  data = yaml.safe_load(yaml_file)
     26  return data.get('test', {})
|
/scripts/pylib/shell-twister-harness/

test_shell.py
     21  data = yaml.safe_load(yaml_file)
     22  for entry in data:
|
/scripts/coccinelle/

find_dev_usage.cocci
     33  // Loading function data base
     35  data = pickle.load(f)
     36  f_void = data["f_void"]
     37  f_other = data["f_other"]
|
find_functions.cocci
    121  data = {}
    122  data['f_void'] = f_void
    123  data['f_other'] = f_other
    124  pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
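The two coccinelle scripts share a pickled function database: find_functions.cocci dumps the f_void/f_other sets and find_dev_usage.cocci loads them back. A sketch of that hand-off; the file name and set contents are placeholders:

```python
import pickle

# Producer side (find_functions.cocci): persist the collected function sets.
data = {'f_void': {'uart_poll_out'}, 'f_other': {'uart_poll_in'}}  # illustrative contents
with open('function_names.pickle', 'wb') as f:
    pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)

# Consumer side (find_dev_usage.cocci): reload the database.
with open('function_names.pickle', 'rb') as f:
    data = pickle.load(f)
f_void, f_other = data['f_void'], data['f_other']
```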
|
/scripts/

list_hardware.py
     48  data = yaml.load(soc_yaml, Loader=SafeLoader)
     49  SOC_VALIDATOR.source = data
     54  for f in data.get('family', []):
     74  for s in data.get('series', []):
     84  for soc in data.get('socs', []):
    102  if 'runners' in data and 'run_once' in data['runners']:
    103  for grp in data['runners']['run_once']:
    104  for item_data in data['runners']['run_once'][grp]:
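list_hardware.py loads soc.yml, validates it, and then walks families, series, and top-level socs. A sketch of that traversal; the exact nesting is an assumption, and the real script also validates against a schema and handles runners/run_once groups:

```python
import yaml

def list_soc_names(soc_yaml: str) -> list[str]:
    """Collect SoC names from the top level and from every family/series."""
    data = yaml.safe_load(soc_yaml)
    names = [soc['name'] for soc in data.get('socs', [])]
    for family in data.get('family', []):
        names += [soc['name'] for soc in family.get('socs', [])]
        for series in family.get('series', []):
            names += [soc['name'] for soc in series.get('socs', [])]
    return names

print(list_soc_names(
    "family:\n- name: stm32\n  series:\n  - name: stm32l5\n    socs:\n    - name: stm32l562xx\n"
))
```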
|
/scripts/build/

elf_parser.py
     25  self.data = self.elf.symbol_data(sym)
     32  return struct.unpack(format, self.data[offset:offset + size])[0]
     62  format += "{:d}h".format(len(self.data) // 2)
     63  self._ordinals = struct.unpack(format, self.data)
    162  data = section.data()
    166  assert offset + length <= len(data)
    168  return bytes(data[offset:offset + length])
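elf_parser.py treats a symbol's payload as packed binary and decodes it with struct, for example a run of 16-bit ordinals sized from the payload length. A sketch of that decode; the endianness is an assumption:

```python
import struct

def unpack_ordinals(data: bytes, endian: str = '<') -> tuple:
    """Interpret the whole payload as an array of 16-bit signed values."""
    fmt = endian + '{:d}h'.format(len(data) // 2)
    return struct.unpack(fmt, data)

print(unpack_ordinals(struct.pack('<3h', 1, 2, 3)))  # (1, 2, 3)
```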
|
/scripts/tracing/

trace_capture_uart.py
     48  data = ser.read()
     49  file_desc.write(data)
|
/scripts/net/

enumerate_http_status.py
     54  def handle_data(self, data):
     56  self.current_data += data
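enumerate_http_status.py collects text nodes through an html.parser callback. A minimal collector of the same shape:

```python
from html.parser import HTMLParser

class TextCollector(HTMLParser):
    """Accumulate character data seen between tags, as the script's handle_data() does."""

    def __init__(self):
        super().__init__()
        self.current_data = ''

    def handle_data(self, data):
        self.current_data += data

collector = TextCollector()
collector.feed("<td>418</td><td>I'm a Teapot</td>")
print(collector.current_data)  # 418I'm a Teapot
```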
|
/scripts/pylib/power-twister-harness/

test_power.py
     36  data = probe.get_data()
     40  data,
|