13.2. framework package

13.2.1. framework.basic_primitives module

framework.basic_primitives.calc_parity_bit(x)

Return 0 if the number of bits set to 1 in x is even, otherwise return 1

framework.basic_primitives.corrupt_bits(s, p=0.01, n=None, ascii=False)

Flip a given percentage or number of bits from a string

framework.basic_primitives.corrupt_bytes(s, p=0.01, n=None, ctrl_char=False)

Corrupt a given percentage or number of bytes from a string

framework.basic_primitives.rand_string(size=None, min=1, max=10, str_set='0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~ \t\n\r\x0b\x0c')

13.2.2. framework.data module

class framework.data.AttrGroup(attrs_desc)

Bases: object

__copy__()
__init__(attrs_desc)
__module__ = 'framework.data'
clear(name)
copy_from(attr_group)
is_set(name)
set(name)
class framework.data.CallBackOps(remove_cb=False, stop_process_cb=False, ignore_no_data=False)

Bases: object

Add_PeriodicData = 10
Del_PeriodicData = 11
ForceDataHandling = 3
RemoveCB = 1
Replace_Data = 30
Set_FbkTimeout = 21
Start_Task = 12
StopProcessingCB = 2
Stop_Task = 13
__init__(remove_cb=False, stop_process_cb=False, ignore_no_data=False)
__module__ = 'framework.data'
add_operation(instr_type, id=None, param=None, period=None)
get_operations()
is_flag_set(name)
set_flag(name)
class framework.data.Data(content=None, altered=False, tg_ids=None, description=None)

Bases: object

__copy__()
__init__(content=None, altered=False, tg_ids=None, description=None)
__module__ = 'framework.data'
__repr__()

Return repr(self).

__str__()

Return str(self).

_empty_data_backend = <framework.data.EmptyBackend object>
add_info(info_str)
bind_info(dmaker_type, data_maker_name)
cleanup_all_callbacks()
cleanup_callbacks(hook=HOOK.after_fbk)
property content
copy_callback_from(data)
generate_info_from_content(data=None, origin=None, additional_info=None)
get_content(do_copy=False)
get_data_id()
get_data_model()
get_history()
get_initial_dmaker()
get_length()
has_info()
has_node_content()
has_raw_content()
info_exists(dmaker_type, data_maker_name, info)
is_blocked()
is_empty()
is_recordable()
is_unusable()
make_blocked()
make_free()
make_recordable()
make_unusable()
property origin
pending_callback_ops(hook=HOOK.after_fbk)
pretty_print(raw_limit=200, log_func=<built-in method write of _io.TextIOWrapper object>)
read_info(dmaker_type, data_maker_name)
register_callback(callback, hook=HOOK.after_fbk)
remove_info_from(dmaker_type, data_maker_name)
reset_history()
run_callbacks(feedback=None, hook=HOOK.after_fbk)
set_attributes_from(attr_group)
set_basic_attributes(from_data=None)
set_data_id(data_id)
set_data_model(dm)
set_history(hist)
set_initial_dmaker(t)
show(raw_limit=200, log_func=<built-in method write of _io.TextIOWrapper object>)
take_info_ownership(keep_previous_info=True)
property tg_ids
to_bytes()
to_formatted_str()
to_str()
update_from(obj)
class framework.data.DataAttr(attrs_to_set=None, attrs_to_clear=None)

Bases: framework.data.AttrGroup

Reset_DMakers = 1
__annotations__ = {}
__init__(attrs_to_set=None, attrs_to_clear=None)
__module__ = 'framework.data'
class framework.data.DataBackend(content=None)

Bases: object

__init__(content=None)
__module__ = 'framework.data'
property content
property data_maker_name
property data_maker_type
property data_model
get_content(do_copy=False, materialize=True)
get_length()
show(raw_limit=200, log_func=<built-in method write of _io.TextIOWrapper object>)
to_bytes()
to_str()
update_from(obj)
class framework.data.DataProcess(process, seed=None, tg_ids=None, auto_regen=False)

Bases: object

__copy__()
__init__(process, seed=None, tg_ids=None, auto_regen=False)

Describe a process to generate a data.

Parameters
  • process (list) – List of disruptors (possibly complemented by parameters) to apply to a seed. However, if the list begins with a generator, the disruptor chain will apply to the outcome of the generator. The generic form for a process is: [action_1, (action_2, UI_2), ... action_n] where action_N can be either: dmaker_type_N or (dmaker_type_N, dmaker_name_N)

  • seed – (Optional) Can be a registered framework.data_model.Node name or a framework.data_model.Data. Will be provided to the first disruptor in the disruptor chain (described by the parameter process) if it does not begin with a generator.

  • tg_ids (list) – Virtual ID list of the targets to which the outcomes of this data process will be sent. If None, the outcomes will be sent to the first target that has been enabled. In the context of scenario, it embeds virtual IDs.

  • auto_regen (boolean) – If True, the data process will be in a state requesting the framework to rerun the data maker chain after a disruptor yielded (meaning it is exhausted with the data provided to it). It will make the chain go on with new data coming either from the first non-exhausted disruptor (preceding the exhausted one), or from the generator if all disruptors are exhausted. If False, the data process won’t be in this state and the framework won’t rerun the data maker chain once a disruptor yields. It means the framework will alert the end-user about this issue, or when used within a Scenario, it will redirect the decision to the scenario itself (this condition may trigger a transition in the scenario).

__module__ = 'framework.data'
__repr__()

Return repr(self).

append_new_process(process)

Append a new process to the list.

formatted_str(oneliner=False)
make_blocked()
make_free()
next_process()
property process
property process_qty
reset()
class framework.data.EmptyBackend(content=None)

Bases: framework.data.DataBackend

__annotations__ = {}
__module__ = 'framework.data'
property content
property data_maker_name
property data_maker_type
property data_model
get_content(do_copy=False, materialize=True)
get_length()
to_bytes()
to_formatted_str()
to_str()
class framework.data.EmptyDataProcess(seed=None, tg_ids=None, auto_regen=False)

Bases: object

__init__(seed=None, tg_ids=None, auto_regen=False)
__module__ = 'framework.data'
class framework.data.NodeBackend(content=None)

Bases: framework.data.DataBackend

__annotations__ = {}
__copy__()
__module__ = 'framework.data'
property content
property data_maker_name
property data_maker_type
get_content(do_copy=False, materialize=True)
get_length()
show(raw_limit=200, log_func=<built-in method write of _io.TextIOWrapper object>)
to_bytes()
to_formatted_str()
to_str()
update_from(obj: framework.node.Node)
class framework.data.RawBackend(content=None)

Bases: framework.data.DataBackend

__annotations__ = {}
__module__ = 'framework.data'
property content
property data_maker_name
property data_maker_type
get_content(do_copy=False, materialize=True)
get_length()
show(raw_limit=200, log_func=<built-in method write of _io.TextIOWrapper object>)
to_bytes()
to_formatted_str()
to_str()
update_from(obj)

13.2.3. framework.data_model module

class framework.data_model.DataModel

Bases: object

Data Model Abstraction

__init__()
__module__ = 'framework.data_model'
__str__()

Return str(self).

_atom_absorption_additional_actions(atom)

Called by .create_atom_from_raw_data(). Should be overloaded if specific actions need to be performed on the atoms created from imported raw data

Parameters

atom – Atom that is going to be registered after being absorbed from raw data

Returns

An atom and a short description of the actions

_backend(atom)
_create_atom_from_raw_data_specific(data, idx, filename)

Overload this method when creating a node from binary strings requires more actions than performing a node absorption.

Parameters
  • data (bytes) – file content

  • idx (int) – index of the imported file

  • filename (str) – name of the imported file

Returns

An atom or None

absorb(data, scope=None, atom_name=None, requested_abs_csts=None)
Parameters
  • data

  • atom_name (str) – requested atom name for the decoding (linked to self.register_atom_for_decoding)

  • scope (str) – requested scope for the decoding (linked to self.register_atom_for_decoding)

  • requested_abs_csts

Returns

Node which is the result of the absorption or None

Return type

Node

atom_identifiers()
build_data_model()

This method is called when a data model is loaded. It is called only the first time the data model is loaded. To be implemented by the user.

cleanup()
create_atom_from_raw_data(data, idx, filename)

This function is called for each file (with the right extension) present in imported_data/<data_model_name> and absorbs its content by leveraging the atoms of the data model registered for absorption, or if none are registered, either calls the method _create_atom_from_raw_data_specific() if it is defined or wraps the content in a framework.node.Node.

Parameters
  • data (bytes) – file content

  • idx (int) – index of the imported file

  • filename (str) – name of the imported file

Returns

An atom or None

customize_node_backend(default_gen_custo=None, default_nonterm_custo=None)
decode(data, scope=None, atom_name=None, requested_abs_csts=None, colorized=True)
Parameters
  • data

  • atom_name (str) – requested atom name for the decoding (linked to self.register_atom_for_decoding)

  • scope (str) – requested scope for the decoding (linked to self.register_atom_for_decoding)

  • requested_abs_csts

  • colorized

Returns

Node which is the result of the absorption or None and Textual description of the result

Return type

tuple

file_extension = 'bin'
get_atom(hash_key, name=None)
get_atom_for_absorption(hash_key)
get_external_atom(dm_name, data_id, name=None)
get_import_directory_path(subdir=None)
import_file_contents(extension=None, absorber=None, subdir=None, path=None, filename=None)
property included_models
knowledge_source = None
load_data_model(dm_db)
merge_with(data_model)
name = None
pre_build()

This method is called when a data model is loaded. It is executed before build_data_model(). To be implemented by the user.

register(*atom_list)
register_atom_for_decoding(atom, absorb_constraints=AbsFullCsts(), decoding_scope=None)

Register an atom that will be used by the DataModel when an operation requiring data absorption is performed, like self.decode().

Parameters
  • atom – Atom to register for absorption

  • absorb_constraints – Constraints to be used for the absorption

  • decoding_scope – Should be either an atom name that can be absorbed by the registered atom, or a textual description of the scope, or a list of the previous elements. If set to None, the atom will be the default one used for decoding operation if no other nodes exist with a specific scope.

show()
update_atom(atom)
validation_tests()

Optional test cases to validate the correct behavior of the data model

Returns

True if the validation succeeds. False otherwise

Return type

bool

class framework.data_model.NodeBackend(data_model)

Bases: object

__init__(data_model)
__module__ = 'framework.data_model'
atom_copy(orig_atom, new_name=None)
default_gen_custo = None
default_nonterm_custo = None
get_all_confs()
merge_with(node_backend)
prepare_atom(atom)
update_atom(atom, existing_env=False)

13.2.4. framework.node module

class framework.node.BitFieldCondition(sf, val=None, neg_val=None, gt_val=None, lt_val=None)

Bases: framework.node.NodeCondition

__init__(sf, val=None, neg_val=None, gt_val=None, lt_val=None)
Parameters
  • sf (int/list of int) – subfield(s) of the BitField() on which the condition apply

  • val (int/list of int/list of list of int) – integer(s) that satisfies the condition(s)

  • neg_val (int/list of int/list of list of int) – integer(s) that does NOT satisfy the condition(s) (AND clause)

  • gt_val (int/list of int/list of list of int) – condition met if subfield(s) greater than or equal to values in this field (AND clause)

  • lt_val (int/list of int/list of list of int) – condition met if subfield(s) less than or equal to values in this field (AND clause)

__module__ = 'framework.node'
check(node)
class framework.node.DJobGroup(node_list)

Bases: object

__id__()
__init__(node_list)
__iter__()
__module__ = 'framework.node'
__repr__()

Return repr(self).

__reversed__()
class framework.node.DynNode_Helpers

Bases: object

__copy__()
__init__()
__module__ = 'framework.node'
_get_graph_info()
_update_dyn_helper(env)
clear_graph_info_since(node)
determinist = True
property graph_info
make_private(env=None)
reset_graph_info()
set_graph_info(node, info)
class framework.node.Env

Bases: object

__copy__()
__getattr__(name)
__init__()
__module__ = 'framework.node'
add_node_to_corrupt(node, corrupt_type=None, corrupt_op=<function Env.<lambda>>)
cleanup_basic_djobs(prio)
cleanup_remaining_djobs(prio)
clear_all_exhausted_nodes()
clear_exhausted_node(node)
property color_enabled
property delayed_jobs_pending
disable_color()
djobs_exists(prio)
enable_color()
execute_basic_djobs(prio)
exhausted_node_exists()
exhausted_nodes_amount()
get_all_djob_groups(prio)
get_basic_djobs(prio)
get_data_model()
get_djobs_by_gid(group_id, prio)
get_exhausted_nodes()
is_djob_registered(key, prio)
is_empty()
is_node_exhausted(node)
knowledge_source = None
notify_exhausted_node(node)
register_basic_djob(func, args, prio=1)
register_djob(func, group, key, cleanup=None, args=None, prio=1)
remove_djob(group, key, prio)
remove_node_to_corrupt(node)
set_data_model(dm)
update_node_refs(node_dico, ignore_frozen_state)
class framework.node.Env4NT

Bases: object

Define methods for non-terminal nodes

__copy__()
__init__()
__module__ = 'framework.node'
clear_drawn_node_attrs(node_id)
get_drawn_node_qty(node_id)
get_drawn_node_sz(node_id)
is_empty()
node_exists(node_id)
reset()
set_drawn_node_attrs(node_id, nb, sz)
update_node_ids(id_list)
class framework.node.FuncCusto(items_to_set=None, items_to_clear=None, transform_func=None)

Bases: framework.node.NodeCustomization

Function node behavior-customization To be provided to NodeInternals.customize()

CloneExtNodeArgs = 2
FrozenArgs = 1
__module__ = 'framework.node'
_custo_items = {1: True, 2: False}
property clone_ext_node_args_mode
property frozen_args_mode
class framework.node.GenFuncCusto(items_to_set=None, items_to_clear=None, transform_func=None)

Bases: framework.node.NodeCustomization

Generator node behavior-customization To be provided to NodeInternals.customize()

CloneExtNodeArgs = 2
ForwardConfChange = 1
ResetOnUnfreeze = 3
TriggerLast = 4
__annotations__ = {}
__module__ = 'framework.node'
_custo_items = {1: True, 2: False, 3: True, 4: False}
property clone_ext_node_args_mode
property forward_conf_change_mode
property reset_on_unfreeze_mode
property trigger_last_mode
class framework.node.IntCondition(val=None, neg_val=None, gt_val=None, lt_val=None)

Bases: framework.node.NodeCondition

__annotations__ = {}
__init__(val=None, neg_val=None, gt_val=None, lt_val=None)
Parameters
  • val (int/list of int) – integer(s) that satisfies the condition

  • neg_val (int/list of int) – integer(s) that does NOT satisfy the condition (AND clause)

  • gt_val (int) – condition met if greater than or equal to this value (AND clause)

  • lt_val (int) – condition met if less than or equal to this value (AND clause)

__module__ = 'framework.node'
check(node)
class framework.node.Node(name, base_node=None, copy_dico=None, ignore_frozen_state=False, accept_external_entanglement=False, acceptance_set=None, subnodes=None, values=None, value_type=None, vt=None, new_env=False, description=None)

Bases: object

A Node is the basic building-block used within a graph-based data model.

internals

Contains all the configuration of a node. A configuration is associated to the internals/contents of a node, which can live independently of the other configuration.

Type

dict of str –> NodeInternals

current_conf

Identifier to a configuration. Every usable node use at least one main configuration, namely 'MAIN'.

Type

str

name

Identifier of a node. Defined at instantiation. Shall be unique from its parent perspective.

Type

str

env

One environment object is added to all the nodes of a node graph when the latter is registered within a data model (cf. DataModel.register()). It is used for sharing global resources between nodes.

Type

Env

entangled_nodes

Collection of all the nodes entangled with this one. All the entangled nodes will react the same way as one of their peers (to some extent) if this peer is subjected to a stimuli. The node’s properties related to entanglement are only the ones that directly define a node. For instance, changing a node’s NodeInternals will propagate to its entangled peers but changing the state of a node’s NodeInternals won’t propagate. It is used for dealing with multiple instances of the same node (within the scope of a NonTerm node—cf. NodeInternals_NonTerm.get_subnodes_with_csts()). But this mechanism can also be used for your own specific purpose.

Type

set(Node)

semantics

(optional) Used to associate a semantics to a node. Can be used during graph traversal in order to perform actions related to semantics.

Type

NodeSemantics

fuzz_weight

The fuzz weight is an optional attribute of Node() which express Data Model designer’s hints for prioritizing the nodes to fuzz. If set, this attribute is used by some generic disruptors (the ones that rely on a ModelWalker object—refer to fuzzing_primitives.py)

Type

int

depth

Depth of the node within the graph from a specific given root. Will be computed lazily (only when requested).

Type

int

tmp_ref_count

(internal use) Temporarily used during the creation of multiple instances of the same node, especially in order to generate unique names.

Type

int

_post_freeze_handler

Is executed just after a node is frozen (which is the result of requesting its value when it is not frozen—e.g., at its creation).

Type

function

CORRUPT_EXIST_COND = 5
CORRUPT_NODE_QTY = 7
CORRUPT_QTY_SYNC = 6
CORRUPT_SIZE_SYNC = 8
DEFAULT_DISABLED_NODEINT = <framework.node.NodeInternals_Empty object>
DEFAULT_DISABLED_VALUE = b''
DJOBS_PRIO_dynhelpers = 200
DJOBS_PRIO_genfunc = 300
DJOBS_PRIO_nterm_existence = 100
__copy__()
__get_confs()
__get_current_internals()
__get_internals()
__getattr__(name)
__getitem__(key)
__hash__()

Return hash(self).

__init__(name, base_node=None, copy_dico=None, ignore_frozen_state=False, accept_external_entanglement=False, acceptance_set=None, subnodes=None, values=None, value_type=None, vt=None, new_env=False, description=None)
Parameters
  • name (str) – Name of the node. Every child node of a node shall have a unique name. Useful to look for specific nodes within a graph.

  • subnodes (list) – (Optional) List of subnodes. If provided the Node will be created as a non-terminal node.

  • values (list) – (Optional) List of strings. If provided the instantiated node will be a String-typed leaf node (taking its possible values from the parameter).

  • value_type (VT) – (Optional) The value type that characterize the node. Defined within value_types.py and inherits from either VT or VT_Alt. If provided the instantiated node will be a value_type-typed leaf node.

  • vt (VT) – alias to value_type.

  • base_node (Node) – (Optional) If provided, it will be used as a template to create the new node.

  • ignore_frozen_state (bool) – [If base_node provided] If True, the clone process of base_node will ignore its current state.

  • accept_external_entanglement (bool) – [If base_node provided] If True, during the cloning process of base_node, every entangled node outside the current graph will be referenced within the new node without being copied. Otherwise, a Warning message will be raised.

  • acceptance_set (set) – [If base_node provided] If provided, will be used as a set of entangled nodes that could be referenced within the new node during the cloning process.

  • copy_dico (dict) – [If base_node provided] It is used internally during the cloning process, and should not be used for any functional purpose.

  • new_env (bool) – [If base_node provided] If True, the base_node attached Env() will be copied. Otherwise, the same will be used. If ignore_frozen_state is True, a new Env() will be used.

  • description (str) – textual description of the node

__lt__(other)

Return self<value.

__module__ = 'framework.node'
__set_current_internals(internal)
__setitem__(key, val)
__str__()

Return str(self).

_check_conf(conf)
_compute_confs(conf, recursive)
_finalize_nonterm_node(conf, depth=None)
_get_all_paths_rec(pname, htable, conf, recursive, first=True, resolve_generator=False, clone_idx=0)
_get_value(conf=None, recursive=True, return_node_internals=False, restrict_csp=False)
_post_freeze(node_internals, wrapping_node, next_conf, recursive, return_node_internals)
static _print(msg, rgb, style='', nl=True, log_func=<built-in method write of _io.TextIOWrapper object>, pretty_print=True)
static _print_contents(msg, style='', nl=True, log_func=<built-in method write of _io.TextIOWrapper object>, pretty_print=True)
static _print_name(msg, style='', nl=True, log_func=<built-in method write of _io.TextIOWrapper object>, pretty_print=True)
static _print_nonterm(msg, style='\x1b[1m', nl=True, log_func=<built-in method write of _io.TextIOWrapper object>, pretty_print=True)
static _print_raw(msg, style='', nl=True, hlight=False, log_func=<built-in method write of _io.TextIOWrapper object>, pretty_print=True)
static _print_type(msg, style='\x1b[1m', nl=True, log_func=<built-in method write of _io.TextIOWrapper object>, pretty_print=True)
_reset_depth(parent_depth)
_set_clone_info(info, node)

Used to propagate random draw results when a NonTerm node is frozen to the dynamic nodes of its attached subgraphs, namely GenFunc/Func nodes which are the only ones which can act dynamically.

_set_subtrees_current_conf(node, conf, reverse, ignore_entanglement=False)
_tobytes(conf=None, recursive=True)
absorb(blob, constraints=AbsCsts(), conf=None, pending_postpone_desc=None)
add_conf(conf)
property c

Property linked to self.internals (read only)

property cc

Property linked to the current node’s internals (read / write)

clear_attr(name, conf=None, all_conf=False, recursive=False)
property color_enabled
compliant_with(internals_criteria=None, semantics_criteria=None, conf=None)
conf(conf=None)
property confs

Property giving all node’s configurations (read only)

property debug
disable_color()
enable_color()
enforce_absorb_constraints(csts, conf=None)
entangle_with(node)
static filter_out_entangled_nodes(node_list)
fix_synchronized_nodes(conf=None)
freeze(conf=None, recursive=True, return_node_internals=False, restrict_csp=False, resolve_csp=False)
Parameters
  • conf

  • recursive

  • return_node_internals

  • restrict_csp – Only effective when a CSP is part of the data description. When set to True, if the node on which this method is called is a variable of the CSP, then its domain will be shrunk to its current value. Thus, the node won’t change when the CSP will be resolved.

  • resolve_csp – Only effective when a CSP is part of the data description. When set to True, the CSP will be resolved and the data generated will comply with the solution.

Returns:

gather_alt_confs()
get_all_paths(conf=None, recursive=True, depth_min=None, depth_max=None, resolve_generator=False, flush_cache=True)
Parameters

resolve_generator – if True, the generator nodes will be resolved in order to perform the search within. But there could be side-effects on the graph, because some parts of the graph could end up frozen if they are used as generator parameters. If False, generator nodes won’t be resolved, but they could already be in a resolved state before this method is called on them. It means that no side effects could result from the call of this method. And thus for this latter case, the method works as if resolve_generator is set to True.

Returns

the keys are either a ‘path’ or a tuple (‘path’, int) when the path already

exists (case of the same node used more than once within the same non-terminal)

Return type

dict

get_all_paths_from(node, conf=None, flush_cache=True, resolve_generator=False)
get_clone(name=None, ignore_frozen_state=False, accept_external_entanglement=False, acceptance_set=None, new_env=True)

Create a new node. To be used within a graph-based data model.

Parameters
  • name (str) – name of the new Node instance. If None the current name will be used.

  • ignore_frozen_state (bool) – if set to False, the clone function will produce a Node with the same state as the duplicated Node. Otherwise (True), the state won’t be kept.

  • accept_external_entanglement (bool) – refer to the corresponding Node parameter

  • acceptance_set (set) – refer to the corresponding Node parameter

  • new_env (bool) – If True, the current Env() will be copied. Otherwise, the same will be used.

Returns

duplicated Node object

Return type

Node

get_csp()
get_current_conf()
get_env()
get_first_node_by_path(path_regexp, conf=None, flush_cache=True, resolve_generator=False)

Return the first Node that match the path_regexp parameter.

Parameters
  • path_regexp (str) – path regexp of the requested nodes

  • conf (str) – Node configuration to use for the search

  • flush_cache (bool) – If False, and a previous search has been performed, the outcomes will be used for this one, which will improve the performance.

Returns

the first Node that match the path regexp

Return type

Node

get_fuzz_weight()

Return the fuzzing weight of the node.

Returns

the fuzzing weight

Return type

int

get_internals_backup()
get_nodes_by_paths(path_list)

Provide a dictionary of the nodes referenced by the paths provided in @path_list. Keys of the dict are the paths provided in @path_list.

Parameters

path_list – list of paths referencing nodes of interest

Returns

dictionary mapping path to nodes

Return type

dict

get_nodes_names(conf=None, verbose=False, terminal_only=False, flush_cache=True)
get_path_from(node, conf=None, flush_cache=True, resolve_generator=False)
get_private(conf=None)
get_reachable_nodes(internals_criteria=None, semantics_criteria=None, owned_conf=None, conf=None, path_regexp=None, exclude_self=False, respect_order=False, top_node=None, ignore_fstate=False, resolve_generator=False, relative_depth=- 1)
Parameters
  • internals_criteria

  • semantics_criteria

  • owned_conf

  • conf

  • path_regexp

  • exclude_self

  • respect_order

  • top_node

  • ignore_fstate

  • resolve_generator – if True, the generator nodes will be resolved in order to perform the search within. But there will be side-effects on the graph, because some parts of the graph could end up frozen if they are used as generator parameters. If False, generator nodes won’t be resolved, but they could already be in a resolved state before this method is called on them. It means that no side effects could result from the call of this method. And thus for this latter case, the method works as if resolve_generator is set to True.

  • relative_depth – For internal use only

Returns:

get_semantics()
get_value(conf=None, recursive=True, return_node_internals=False, restrict_csp=False, resolve_csp=False)
Parameters
  • conf

  • recursive

  • return_node_internals

  • restrict_csp – Only effective when a CSP is part of the data description. When set to True, if the node on which this method is called is a variable of the CSP, then its domain will be shrunk to its current value. Thus, the node won’t change when the CSP will be resolved.

  • resolve_csp – Only effective when a CSP is part of the data description. When set to True, the CSP will be resolved and the data generated will comply with the solution.

Returns:

is_attr_set(name, conf=None)
is_conf_existing(conf)
is_empty(conf=None)
is_exhausted(conf=None)
is_frozen(conf=None)
is_func(conf=None)
is_genfunc(conf=None)
is_nonterm(conf=None)
is_path_valid(path, resolve_generator=False)
is_term(conf=None)
is_typed_value(conf=None, subkind=None)
iter_nodes_by_path(path_regexp, conf=None, flush_cache=True, resolve_generator=False)

iterate over all the nodes that match the path_regexp parameter.

Note: the set of nodes that is used to perform the search include the node itself and all the subnodes behind it.

Parameters
  • path_regexp (str) – path regexp of the requested nodes

  • conf (str) – Node configuration to use for the search

  • flush_cache (bool) – If False, and a previous search has been performed, the outcomes will be used for this one, which will improve the performance.

Returns

generator of the nodes that match the path regexp

iter_paths(conf=None, recursive=True, depth_min=None, depth_max=None, only_paths=False, resolve_generator=False, flush_cache=True)
make_determinist(conf=None, all_conf=False, recursive=False)
make_empty(conf=None)
make_finite(conf=None, all_conf=False, recursive=False)
make_infinite(conf=None, all_conf=False, recursive=False)
make_random(conf=None, all_conf=False, recursive=False)
make_synchronized_with(scope, node=None, param=None, sync_obj=None, conf=None)
property no_more_solution_for_csp
pretty_print(max_size=None, conf=None)
register_post_freeze_handler(func)
remove_conf(conf)
reset_fuzz_weight(recursive=False)

Reset to standard (1) the fuzzing weight that is associated to this node, and all its subnodes if recursive parameter is set to True.

Parameters

recursive (bool) – if set to True, also reset every subnode (all nodes reachable from this one).

Returns

None

reset_state(recursive=False, exclude_self=False, conf=None, ignore_entanglement=False)
set_absorb_helper(helper, conf=None)
set_attr(name, conf=None, all_conf=False, recursive=False)
set_contents(base_node, copy_dico=None, ignore_frozen_state=False, accept_external_entanglement=False, acceptance_set=None, preserve_node=True)

Set the contents of the node based on the one provided within base_node. This method performs a deep copy of base_node, but some parameters can change the behavior of the copy.

Note

python deepcopy() is not used for performance reason (10 to 20 times slower) and as it does not work for all cases.

Parameters
  • base_node (Node) – (Optional) Used as a template to create the new node.

  • ignore_frozen_state (bool) – If True, the clone process of base_node will ignore its current state.

  • preserve_node (bool) – preserve the NodeInternals attributes (making sense to preserve) of the possible overwritten NodeInternals.

  • accept_external_entanglement (bool) – If True, during the cloning process of base_node, every entangled node outside the current graph will be referenced within the new node without being copied. Otherwise, a Warning message will be raised.

  • acceptance_set (set) – If provided, will be used as a set of entangled nodes that could be referenced within the new node during the cloning process.

  • copy_dico (dict) – It is used internally during the cloning process, and should not be used for any functional purpose.

Returns

For each subnodes of base_node (keys), reference the corresponding subnodes within the new node.

Return type

dict

set_csp(csp: framework.constraint_helpers.CSP)
set_current_conf(conf, recursive=True, reverse=False, root_regexp=None, ignore_entanglement=False)
set_default_value(value, conf=None)
set_env(env)
set_frozen_value(value, conf=None)
set_func(func, func_node_arg=None, func_arg=None, conf=None, ignore_entanglement=False, provide_helpers=False, preserve_node=True)
set_fuzz_weight(w)

Set the fuzzing weight of the node to w.

The fuzz weight is an optional attribute of Node() which express Data Model designer’s hints for prioritizing the nodes to fuzz. If set, this attribute is used by some generic disruptors (the ones that rely on a ModelWalker object—refer to fuzzing_primitives.py)

Parameters

w (int) – Value of the weight (by default every node has a weight of 1)

Returns

None

set_generator_func(gen_func, func_node_arg=None, func_arg=None, conf=None, ignore_entanglement=False, provide_helpers=False, preserve_node=True)
set_internals(backup)
set_private(val, conf=None)
set_semantics(sem)
set_size_from_constraints(size=None, encoded_size=None, conf=None)
set_subnodes_basic(node_list, conf=None, ignore_entanglement=False, separator=None, preserve_node=True)
set_subnodes_full_format(subnodes_order, subnodes_attrs, conf=None, separator=None, preserve_node=True)
set_subnodes_with_csts(wlnode_list, conf=None, ignore_entanglement=False, separator=None, preserve_node=True)
set_values(values=None, value_type=None, conf=None, ignore_entanglement=False, preserve_node=True)
show(conf=None, verbose=True, print_name_func=None, print_contents_func=None, print_raw_func=None, print_nonterm_func=None, print_type_func=None, alpha_order=False, raw_limit=None, log_func=<built-in method write of _io.TextIOWrapper object>, pretty_print=True, display_title=True, display_gen_node=True)
synchronized_with(scope, conf=None)
to_ascii(conf=None, recursive=True)
to_bytes(conf=None, recursive=True)
to_formatted_str(conf=None, recursive=True)
to_str(conf=None, recursive=True)
unfreeze(conf=None, recursive=True, dont_change_state=False, ignore_entanglement=False, only_generators=False, reevaluate_constraints=False, walk_csp=False, walk_csp_step_size=1)
unfreeze_all(recursive=True, ignore_entanglement=False)
update(node_update_dict, stop_on_error=True)
walk(conf=None, recursive=True, steps_num=1)
class framework.node.NodeAbstraction

Bases: object

This class can be used in place of a node_arg for Func and GenFunc Nodes. It enables you to define in your data model higher level classes upon Nodes to facilitate Nodes manipulation within Func and GenFunc Nodes, with regards to your data model paradigm.

__module__ = 'framework.node'
get_concrete_nodes()

Shall return a Node or a list of Nodes

make_private()

This method is called during Node copy process. It aims to make all your metadata private (if needed). Note that you don’t have to deal with your Nodes.

set_concrete_nodes(nodes_args)

Shall save a Node or a list of Nodes (depending on what get_concrete_nodes() returns)

class framework.node.NodeCondition

Bases: object

Base class for every node-related condition. (Note that NodeCondition may be copied many times. If some attributes need to be fully copied, handle this through __copy__() overriding.)

__annotations__ = {}
__module__ = 'framework.node'
_check_inclusion(curr_val, val=None, neg_val=None)
_check_int(val, gt_val=None, lt_val=None)
check(node)
class framework.node.NodeCustomization(items_to_set=None, items_to_clear=None, transform_func=None)

Bases: object

Base class for node customization

__annotations__ = {}
__copy__()
__getitem__(key)
__init__(items_to_set=None, items_to_clear=None, transform_func=None)
__module__ = 'framework.node'
_custo_items = {}
clear_items(items_to_clear)
copy_from(node_custo)
set_items(items_to_set)
property transform_func
class framework.node.NodeInternals(arg=None)

Bases: object

Base class for implementing the contents of a node.

Abs_Postpone = 6
AutoSeparator = 16
DEBUG = 40
DISABLED = 100
Determinist = 3
Finite = 4
Freezable = 1
Highlight = 30
LOCKED = 50
Mutable = 2
Separator = 15
__hash__()

Return hash(self).

__init__(arg=None)
__module__ = 'framework.node'
_clear_attr_direct(name)
_get_value(conf=None, recursive=True, return_node_internals=False, restrict_csp=False)
_init_specific(arg)
_make_private_specific(ignore_frozen_state, accept_external_entanglement)
_make_specific(name)
_match_mandatory_attrs(criteria)
_match_mandatory_custo(criteria)
_match_negative_attrs(criteria)
_match_negative_custo(criteria)
_match_negative_node_kinds(criteria)
_match_negative_node_subkinds(criteria)
_match_node_constraints(criteria)
_match_node_kinds(criteria)
_match_node_subkinds(criteria)
_set_attr_direct(name)
_unmake_specific(name)
_update_node_refs(node_dico, debug)
absorb(blob, constraints, conf, pending_postpone_desc=None)
clear_attr(name)
clear_child_attr(name, conf=None, all_conf=False, recursive=False)
clear_clone_info_since(node)

Cleanup obsolete graph internals information prior to what has been registered with the node given as parameter.

customize(custo)
property debug
default_custo = None
enforce_absorb_constraints(csts)
property env
get_attrs_copy()
get_current_subkind()
get_node_sync(scope)
get_private()
get_raw_value(**kwargs)
has_subkinds()
property highlight
is_attr_set(name)
is_exhausted()
is_frozen()
make_private(ignore_frozen_state, accept_external_entanglement, delayed_node_internals, forget_original_sync_objs=False)
match(internals_criteria)
pretty_print(max_size=None)
reset_depth_specific(depth)
set_absorb_helper(helper)
set_attr(name)
set_attrs_from(all_attrs)
set_child_attr(name, conf=None, all_conf=False, recursive=False)
set_clone_info(info, node)

Report to Node._set_clone_info() some information about graph internals

set_contents_from(node_internals)
set_node_sync(scope, node=None, param=None, sync_obj=None)
set_private(val)
set_size_from_constraints(size, encoded_size)
synchronize_nodes(src_node)
class framework.node.NodeInternalsCriteria(mandatory_attrs=None, negative_attrs=None, node_kinds=None, negative_node_kinds=None, node_subkinds=None, negative_node_subkinds=None, mandatory_custo=None, negative_custo=None, required_csts=None, negative_csts=None)

Bases: object

__init__(mandatory_attrs=None, negative_attrs=None, node_kinds=None, negative_node_kinds=None, node_subkinds=None, negative_node_subkinds=None, mandatory_custo=None, negative_custo=None, required_csts=None, negative_csts=None)
__module__ = 'framework.node'
_handle_user_input(crit)
clear_node_constraint(cst)
extend(ic)
get_all_node_constraints()
get_node_constraint(cst)
has_node_constraints()
set_node_constraint(cst, required)
class framework.node.NodeInternals_Empty(arg=None)

Bases: framework.node.NodeInternals

__annotations__ = {}
__module__ = 'framework.node'
_get_value(conf=None, recursive=True, return_node_internals=False, restrict_csp=False)
get_child_nodes_by_attr(internals_criteria, semantics_criteria, owned_conf, conf, path_regexp, exclude_self, respect_order, relative_depth, top_node, ignore_fstate, resolve_generator=False)
get_raw_value(**kwargs)
set_child_env(env)
class framework.node.NodeInternals_Func(arg=None)

Bases: framework.node.NodeInternals_Term

__annotations__ = {}
__get_value_specific_mode1(conf, recursive)

In mode1, we freeze ‘node_arg’ attribute and give the value to the function

__get_value_specific_mode2(conf, recursive)

In mode2, we give the ‘node_arg’ to the function and let it do whatever it wants

__module__ = 'framework.node'
_get_value_specific(conf, recursive)
_init_specific(arg)
_make_private_term_specific(ignore_frozen_state, accept_external_entanglement)
_reset_state_specific(recursive, exclude_self, conf, ignore_entanglement)
_unfreeze_reevaluate_constraints(current_val)
_unfreeze_without_state_change(current_val)
absorb(blob, constraints, conf, pending_postpone_desc=None)
cancel_absorb()
clear_clone_info_since(node)

Cleanup obsolete graph internals information prior to what has been registered with the node given as parameter.

confirm_absorb()
customize(custo)
default_custo = <framework.node.FuncCusto object>
get_node_args()
import_func(fct, fct_node_arg=None, fct_arg=None, provide_helpers=False)
make_args_private(node_dico, entangled_set, ignore_frozen_state, accept_external_entanglement)
set_clone_info(info, node)

Report to Node._set_clone_info() some information about graph internals

set_func_arg(node=None, fct_arg=None)
set_size_from_constraints(size, encoded_size)
class framework.node.NodeInternals_GenFunc(arg=None)

Bases: framework.node.NodeInternals

__annotations__ = {}
__getattr__(name)
__module__ = 'framework.node'
_get_delayed_value(conf=None, recursive=True, restrict_csp=False)
_get_value(conf=None, recursive=True, return_node_internals=False, restrict_csp=False)
_init_specific(arg)
_make_private_specific(ignore_frozen_state, accept_external_entanglement)
_make_specific(name)
_unmake_specific(name)
absorb(blob, constraints, conf, pending_postpone_desc=None)
cancel_absorb()
clear_child_attr(name, conf=None, all_conf=False, recursive=False)
clear_clone_info_since(node)

Cleanup obsolete graph internals information prior to what has been registered with the node given as parameter.

confirm_absorb()
default_custo = <framework.node.GenFuncCusto object>
property env
property generated_node
get_child_all_path(name, htable, conf, recursive, resolve_generator=False)
get_child_nodes_by_attr(internals_criteria, semantics_criteria, owned_conf, conf, path_regexp, exclude_self, respect_order, relative_depth, top_node, ignore_fstate, resolve_generator=False)
get_node_args()
get_raw_value(**kwargs)
import_generator_func(generator_func, generator_node_arg=None, generator_arg=None, provide_helpers=False)
is_exhausted()
is_frozen()
make_args_private(node_dico, entangled_set, ignore_frozen_state, accept_external_entanglement)
reset_depth_specific(depth)
reset_fuzz_weight(recursive)
reset_generator()
reset_state(recursive=False, exclude_self=False, conf=None, ignore_entanglement=False)
set_child_attr(name, conf=None, all_conf=False, recursive=False)
set_child_current_conf(node, conf, reverse, ignore_entanglement)
set_child_env(env)
set_clone_info(info, node)

Report to Node._set_clone_info() some information about graph internals

set_generator_func_arg(generator_node_arg=None, generator_arg=None)
set_size_from_constraints(size, encoded_size)
unfreeze(conf=None, recursive=True, dont_change_state=False, ignore_entanglement=False, only_generators=False, reevaluate_constraints=False)
unfreeze_all(recursive=True, ignore_entanglement=False)
class framework.node.NodeInternals_NonTerm(arg=None)

Bases: framework.node.NodeInternals

It is a kind of node internals that enables structuring the graph through a specific grammar…

INFINITY_LIMIT = 30
class NodeAttrs

Bases: object

__copy__()
__module__ = 'framework.node'
_current_qty = None
_default_qty = None
_max = None
_min = None
_planned_reset = False
_previous_current_qty_was_none = False
_previous_qty = None
_qty_sequence = None
property current_qty
property default_qty
exhausted_seq = False
next_qty()
perform_planned_reset()
plan_reset()
property qty
property qty_sequence
reset()
unplan_reset()
unroll()
__annotations__ = {}
__iter_csts(node_list)
__iter_csts_verbose(node_list)
__module__ = 'framework.node'
static _cleanup_delayed_nodes(node, node_list, idx, conf, rec)
_cleanup_entangled_nodes()
_cleanup_entangled_nodes_from(node)
_clear_drawn_node_attrs(node)
_clone_node(base_node, node_no, force_clone=False, ignore_frozen_state=True)
_clone_node_cleanup()
_clone_separator(sep_node, unique, force_clone=False, ignore_frozen_state=True)
_clone_separator_cleanup()
_construct_subnodes(node_desc, subnode_list, mode, ignore_sep_fstate, ignore_separator=False, lazy_mode=True)
_copy_nodelist(node_list)
static _existence_from_node(node)
static _expand_delayed_nodes(node, node_list, idx, conf, rec)
_get_heavier_component(comp_list, check_existence=False)
_get_info_from_subnode_description(node_desc)
static _get_next_heavier_component(comp_list, excluded_idx)
static _get_next_random_component(comp_list, excluded_idx, seed=None)
_get_node_and_minmax_from(node_desc)
_get_node_from(node_desc)
_get_random_component(comp_list, total_weight, check_existence=False)
_get_value(conf=None, recursive=True, after_encoding=True, return_node_internals=False, restrict_csp=False)

The parameter return_node_internals is not used for non terminal nodes, only for terminal nodes. However, keeping it also for non terminal nodes avoid additional checks in the code.

_init_specific(arg)
_make_private_specific(ignore_frozen_state, accept_external_entanglement)
_make_specific(name)
_parse_node_desc(node_desc)
_precondition_subnode_ops()
static _qty_from_node(node)
_reset_state_info(new_info=None, nodes_drawn_qty=None)
_set_drawn_node_attrs(node, nb, sz)
static _size_from_node(node, for_encoded_size=False)
_unmake_specific(name)
absorb(blob, constraints, conf, pending_postpone_desc=None)
TOFIX: Checking existence condition independently of data

description order is not supported. Only supported within the same non-terminal node. Use delayed job infrastructure to cover all cases (TBC).

add(node, min=1, max=1, default_qty=None, after=None, before=None, idx=None)

This method add a new node to this non-terminal. The location and the quantity can be configured through the parameters.

Parameters
  • node (Node) – The node to add

  • min – The minimum number of repetition of this node within the non-terminal node

  • max – The maximum number of repetition of this node within the non-terminal node

  • default_qty – the default number of repetition of this node within the non-terminal node

  • after – If not None, it should be the node (within the non-terminal) after which the new node will be inserted.

  • before – If not None, it should be the node (within the non-terminal) before which the new node will be inserted.

  • idx – If not None, it should provide the position in the list of subnodes where the new node will be inserted.

cancel_absorb()
change_subnodes_csts(csts_ch)
clear_child_attr(name, conf=None, all_conf=False, recursive=False)
clear_clone_info_since(node)

Cleanup obsolete graph internals information prior to what has been registered with the node given as parameter.

confirm_absorb()
default_custo = <framework.node.NonTermCusto object>
static existence_corrupt_hook(node, exist)
flatten_node_list(node_list)

Return a list of the form: [subnode1, subnode2, subnode3, ….] In case of Pick-type sections within the parent node, sublists are included within the previous one and include the alternative subnodes, so that the list looks like: [subnode1, [snode21, snode22, …], subnode3, ….]

Parameters

node_list

Returns:

get_child_all_path(name, htable, conf, recursive, resolve_generator=False)
get_child_nodes_by_attr(internals_criteria, semantics_criteria, owned_conf, conf, path_regexp, exclude_self, respect_order, relative_depth, top_node, ignore_fstate, resolve_generator=False)
get_drawn_node_qty(node_ref)
get_raw_value(**kwargs)
get_separator_node()
get_subnode(num)
get_subnode_default_qty(node)
get_subnode_idx(node)
get_subnode_minmax(node)
get_subnode_off(num)
get_subnode_qty()
get_subnodes_collection()
get_subnodes_csts_copy(node_dico=None)
get_subnodes_with_csts()

Generate the structure of the non terminal node.

import_subnodes_basic(node_list, separator=None, preserve_node=False)
import_subnodes_full_format(subnodes_order=None, subnodes_attrs=None, frozen_node_list=None, current_flat_nodelist=None, internals=None, nodes_drawn_qty=None, custo=None, exhaust_info=None, separator=None)
import_subnodes_with_csts(wlnode_list, separator=None, preserve_node=False)
is_exhausted()
is_frozen()
make_private_subnodes(node_dico, func_nodes, env, ignore_frozen_state, accept_external_entanglement, entangled_set, delayed_node_internals)
static nodeqty_corrupt_hook(node, mini, maxi)
static qtysync_corrupt_hook(node, qty)
replace_subnode(old, new)
reset(nodes_drawn_qty=None, custo=None, exhaust_info=None, preserve_node=False)
reset_depth_specific(depth)
reset_fuzz_weight(recursive)
reset_state(recursive=False, exclude_self=False, conf=None, ignore_entanglement=False)
set_child_attr(name, conf=None, all_conf=False, recursive=False)
set_child_current_conf(node, conf, reverse, ignore_entanglement)
set_child_env(env)
set_clone_info(info, node)

Report to Node._set_clone_info() some information about graph internals

set_encoder(encoder)
set_separator_node(sep_node, prefix=True, suffix=True, unique=False, always=False)
set_size_from_constraints(size, encoded_size)
set_subnode_default_qty(node, default_qty=None)
set_subnode_minmax(node, min=None, max=None)
static sizesync_corrupt_hook(node, length)
structure_will_change()

To be used only in Finite mode. Return True if the structure will change the next time _get_value() is called.

Returns: bool

unfreeze(conf=None, recursive=True, dont_change_state=False, ignore_entanglement=False, only_generators=False, reevaluate_constraints=False)
unfreeze_all(recursive=True, ignore_entanglement=False)
class framework.node.NodeInternals_Term(arg=None)

Bases: framework.node.NodeInternals

__annotations__ = {}
__module__ = 'framework.node'
static _convert_to_internal_repr(val)
_get_value(conf=None, recursive=True, return_node_internals=False, restrict_csp=False)
_get_value_specific(conf, recursive)
_init_specific(arg)
_make_private_specific(ignore_frozen_state, accept_external_entanglement)
_make_private_term_specific(ignore_frozen_state, accept_external_entanglement)
_reset_state_specific(recursive, exclude_self, conf, ignore_entanglement)
_set_default_value(val)
_set_default_value_specific(val)
_set_frozen_value(val)
_unfreeze_reevaluate_constraints(current_val)
_unfreeze_without_state_change(current_val)
_update_value_specific(value)
absorb(blob, constraints, conf, pending_postpone_desc=None)
absorb_auto_helper(blob, constraints)
cancel_absorb()
confirm_absorb()
do_absorb(blob, constraints, off, size)
do_cleanup_absorb()
do_revert_absorb()
get_child_all_path(name, htable, conf, recursive, resolve_generator=False)
get_child_nodes_by_attr(internals_criteria, semantics_criteria, owned_conf, conf, path_regexp, exclude_self, respect_order, relative_depth, top_node, ignore_fstate, resolve_generator=False)
get_raw_value(**kwargs)
is_exhausted()
is_frozen()
reset_depth_specific(depth)
reset_fuzz_weight(recursive)
reset_state(recursive=False, exclude_self=False, conf=None, ignore_entanglement=False)
set_child_current_conf(node, conf, reverse, ignore_entanglement)
set_child_env(env)
unfreeze(conf=None, recursive=True, dont_change_state=False, ignore_entanglement=False, only_generators=False, reevaluate_constraints=False)
unfreeze_all(recursive=True, ignore_entanglement=False)
update_value(value)
class framework.node.NodeInternals_TypedValue(arg=None)

Bases: framework.node.NodeInternals_Term

__annotations__ = {}
__getattr__(name)
__module__ = 'framework.node'
_get_value_specific(conf=None, recursive=True)
_init_specific(arg)
_make_private_term_specific(ignore_frozen_state, accept_external_entanglement)
_make_specific(name)
_reset_state_specific(recursive, exclude_self, conf, ignore_entanglement)
_set_default_value_specific(val)
_unfreeze_reevaluate_constraints(current_val)
_unfreeze_without_state_change(current_val)
_unmake_specific(name)
_update_value_specific(value)
absorb_auto_helper(blob, constraints)
do_absorb(blob, constraints, off, size)
do_cleanup_absorb()
do_revert_absorb()
get_current_subkind()
get_raw_value(**kwargs)
get_specific_fuzzy_values()
get_value_type()
has_subkinds()
import_value_type(value_type)
is_exhausted()
pretty_print(max_size=None)
set_size_from_constraints(size, encoded_size)
set_specific_fuzzy_values(vals)
class framework.node.NodeSemantics(attrs=None)

Bases: object

To be used while defining a data model as a means to associate semantics to a Node.

__init__(attrs=None)
__module__ = 'framework.node'
__str__()

Return str(self).

_match_exclusive_criteria(criteria)
_match_mandatory_criteria(criteria)
_match_negative_criteria(criteria)
_match_optionalbut1_criteria(criteria)
add_attributes(attrs)
make_private()

This method is called during Node copy process. It aims to make all your metadata private (if needed).

match(semantics_criteria)

This method is called within get_reachable_nodes() (when the ‘semantics’ parameter is provided) to select Node that match the given semantics.

what_match_from(raw_criteria_list)
class framework.node.NodeSemanticsCriteria(optionalbut1_criteria=None, mandatory_criteria=None, exclusive_criteria=None, negative_criteria=None)

Bases: object

__bool__()
__init__(optionalbut1_criteria=None, mandatory_criteria=None, exclusive_criteria=None, negative_criteria=None)
__module__ = 'framework.node'
_handle_user_input(crit)
extend(sc)
get_exclusive_criteria()
get_mandatory_criteria()
get_negative_criteria()
get_optionalbut1_criteria()
set_exclusive_criteria(criteria)
set_mandatory_criteria(criteria)
set_negative_criteria(criteria)
set_optionalbut1_criteria(criteria)
class framework.node.NodeSeparator(node, prefix=True, suffix=True, unique=False, always=False)

Bases: object

A node separator is used (optionally) by a non-terminal node as a separator between each subnode.

make_private

used for full copy

Type

function

__init__(node, prefix=True, suffix=True, unique=False, always=False)
Parameters
  • node (Node) – node to be used for separation.

  • prefix (bool) – if True, a separator will also be placed at the beginning.

  • suffix (bool) – if True, a separator will also be placed at the end.

  • unique (bool) – if False, the same node will be used for each separation, otherwise a new node will be generated.

  • always (bool) – if True, the separator will be always generated even if the subnodes it separates are not generated because their evaluated quantity is 0.

__module__ = 'framework.node'
make_private(node_dico, ignore_frozen_state)
class framework.node.NonTermCusto(items_to_set=None, items_to_clear=None, transform_func=None)

Bases: framework.node.NodeCustomization

Non-terminal node behavior-customization To be provided to NodeInternals.customize()

CollapsePadding = 4
CycleClone = 2
DelayCollapsing = 5
FrozenCopy = 3
FullCombinatory = 6
MutableClone = 1
StickToDefault = 7
__annotations__ = {}
__module__ = 'framework.node'
_custo_items = {1: True, 2: False, 3: True, 4: False, 5: False, 6: False, 7: False}
property collapse_padding_mode
property cycle_clone_mode
property delay_collapsing
property frozen_copy_mode
property full_combinatory_mode
property mutable_clone_mode
property stick_to_default_mode
class framework.node.RawCondition(val=None, neg_val=None, cond_func=None, case_sensitive=True)

Bases: framework.node.NodeCondition

__annotations__ = {}
__init__(val=None, neg_val=None, cond_func=None, case_sensitive=True)
Parameters
  • val (bytes/list of bytes) – value(s) that satisfies the condition

  • neg_val (bytes/list of bytes) – value(s) that does NOT satisfy the condition (AND clause)

  • cond_func – function that takes the node value and return a boolean

  • case_sensitive – if False, ignore case for performing comparison

__module__ = 'framework.node'
_handle_cond(val)
check(node)
class framework.node.SyncExistenceObj(sync_list, and_junction=True)

Bases: framework.node.SyncObj

__init__(sync_list, and_junction=True)
__module__ = 'framework.node'
_condition_satisfied(node, condition)
check()
get_node_containers()

Shall return either a Node or a list of Nodes or a list of (Node, param) where param should provide __copy__ method if needed.

put_node_containers(new_containers)

This method will be called to provide updated containers that should replace the old ones.

Parameters

new_containers – the updated containers

class framework.node.SyncObj

Bases: object

__annotations__ = {}
__module__ = 'framework.node'
_sync_nodes_specific(src_node)
get_node_containers()

Shall return either a Node or a list of Nodes or a list of (Node, param) where param should provide __copy__ method if needed.

make_private(node_dico)
put_node_containers(new_containers)

This method will be called to provide updated containers that should replace the old ones.

Parameters

new_containers – the updated containers

synchronize_nodes(src_node)
class framework.node.SyncQtyFromObj(node, base_qty=0)

Bases: framework.node.SyncObj

__annotations__ = {}
__init__(node, base_qty=0)
__module__ = 'framework.node'
get_node_containers()

Shall return either a Node or a list of Nodes or a list of (Node, param) where param should provide __copy__ method if needed.

put_node_containers(new_containers)

This method will be called to provide updated containers that should replace the old ones.

Parameters

new_containers – the updated containers

property qty
class framework.node.SyncScope(value)

Bases: enum.Enum

An enumeration.

Existence = 10
Inexistence = 11
Qty = 1
QtyFrom = 2
Size = 20
__module__ = 'framework.node'
class framework.node.SyncSizeObj(node, base_size=0, apply_to_enc_size=False)

Bases: framework.node.SyncObj

__annotations__ = {}
__init__(node, base_size=0, apply_to_enc_size=False)
__module__ = 'framework.node'
_sync_nodes_specific(src_node)
get_node_containers()

Shall return either a Node or a list of Nodes or a list of (Node, param) where param should provide __copy__ method if needed.

put_node_containers(new_containers)

This method will be called to provide updated containers that should replace the old ones.

Parameters

new_containers – the updated containers

set_size_on_source_node(size)
property size_for_absorption
framework.node.flatten(nested)
framework.node.make_entangled_nodes(node_list)
framework.node.make_wrapped_node(name, vals=None, node=None, prefix=None, suffix=None, key_node_name='KEY_ELT')
framework.node.split_verbose_with(predicate, iterable)
framework.node.split_with(predicate, iterable)

13.2.5. framework.node_builder module

class framework.node_builder.NodeBuilder(dm=None, delayed_jobs=True, add_env=True, default_gen_custo=None, default_nonterm_custo=None)

Bases: object

HIGH_PRIO = 1
LOW_PRIO = 3
MEDIUM_PRIO = 2
RootNS = 1
VERYLOW_PRIO = 4
__get_node_from_db(name_desc, namespace=None)
__handle_clone(desc, parent_node, namespace=None)
__init__(dm=None, delayed_jobs=True, add_env=True, default_gen_custo=None, default_nonterm_custo=None)

Help the process of data description. This class is able to construct a framework.data_model.Node object from a JSON-like description.

Parameters
  • dm (DataModel) – a DataModel object, only required if the ‘import_from’ statement is used with create_graph_from_desc().

  • delayed_jobs (bool) – Enable or disable the delayed jobs feature. Used for instance for delaying constraints that cannot be solved immediately.

  • add_env (bool) – If True, an framework.data_model.Env object will be assigned to the generated framework.data_model.Node from create_graph_from_desc(). Should be set to False if you consider using the generated Node within another description or if you will copy it for building a new node type. Keeping an Env() object can be dangerous if you make some clones of it and don’t pay attention to set a new Env() for each copy, because a graph SHALL have only one Env() shared between all its nodes, and an Env() shall not be shared between independent graphs (otherwise it could lead to unexpected results).

  • default_gen_custo – override default Generator node customization

  • default_nonterm_custo – override default NonTerminal node customization

__module__ = 'framework.node_builder'
__post_handling(desc, node, namespace=None)
__pre_handling(desc, node, namespace=None)
_clone_from_dict(node, ref, desc, current_ns)
_complete_func(node, args, conf, from_ns, current_ns)
_complete_generator(node, args, conf, from_ns, current_ns)
_complete_generator_from_desc(node, args, conf)
_create_generator_node(desc, node=None, namespace=None)
_create_graph_from_desc(desc, parent_node, namespace=None)
_create_leaf_node(desc, node=None, namespace=None)
_create_nodes_from_shape(shapes, parent_node, shape_type='>', dup_mode='u', namespace=None)
_create_non_terminal_node(desc, node=None, namespace=None)
_create_non_terminal_node_from_regex(desc, node=None, namespace=None)
_create_todo_list()
_get_from_dict(node, ref, parent_node)
_handle_common_attr(node, desc, conf, current_ns=None)
_handle_custo(node, desc, conf)
_handle_name(name_desc, namespace=None)
_register_todo(node, func, args=None, unpack_args=True, prio=2, last_position=False)
_set_env(node, args)
_set_sync_node(node, comp, scope, conf, private, from_ns)
_setup_constraints(node, constraints, root_namespace, constraint_highlight)
_update_provided_node(desc, node=None, namespace=None)
_verify_keys_conformity(desc)
create_graph_from_desc(desc)
ic = <framework.node.NodeInternalsCriteria object>
valid_keys = ['name', 'contents', 'qty', 'clone', 'type', 'alt', 'conf', 'custo_set', 'custo_clear', 'evolution_func', 'description', 'default_qty', 'namespace', 'from_namespace', 'highlight', 'constraints', 'constraints_highlight', 'weight', 'shape_type', 'section_type', 'duplicate_mode', 'weights', 'separator', 'prefix', 'suffix', 'unique', 'always', 'encoder', 'node_args', 'other_args', 'provide_helpers', 'trigger_last', 'specific_fuzzy_vals', 'default', 'import_from', 'data_id', 'determinist', 'random', 'finite', 'infinite', 'mutable', 'clear_attrs', 'set_attrs', 'absorb_csts', 'absorb_helper', 'semantics', 'fuzz_weight', 'sync_qty_with', 'qty_from', 'exists_if', 'exists_if_not', 'exists_if/and', 'exists_if/or', 'sync_size_with', 'sync_enc_size_with', 'post_freeze', 'charset', 'debug']
class framework.node_builder.RegexParser(machine=None)

Bases: framework.node_builder.StateMachine

class Brackets(machine=None)

Bases: framework.node_builder.StateMachine, framework.node_builder.RegexParser.QtyState

class Comma(machine)

Bases: framework.node_builder.RegexParser.Brackets.Max

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
class Final(machine)

Bases: framework.node_builder.State

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(context)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

INITIAL = False
class Initial(machine)

Bases: framework.node_builder.State

INITIAL = True
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class Max(machine)

Bases: framework.node_builder.State

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(context)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class Min(machine)

Bases: framework.node_builder.State

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(context)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

__annotations__ = {}
__module__ = 'framework.node_builder'
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class Choice(machine)

Bases: framework.node_builder.RegexParser.Initial

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
class Dot(machine)

Bases: framework.node_builder.RegexParser.Group

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
class Escape(machine)

Bases: framework.node_builder.State

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class EscapeMetaSequence(machine)

Bases: framework.node_builder.RegexParser.Group

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
class Final(machine)

Bases: framework.node_builder.State

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class Group(machine)

Bases: framework.node_builder.State

__annotations__ = {}
__module__ = 'framework.node_builder'
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class Initial(machine)

Bases: framework.node_builder.State

INITIAL = True
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class Main(machine)

Bases: framework.node_builder.State

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class Parenthesis(machine=None)

Bases: framework.node_builder.StateMachine, framework.node_builder.RegexParser.Group

class Choice(machine)

Bases: framework.node_builder.RegexParser.Parenthesis.Initial

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class Escape(machine)

Bases: framework.node_builder.State

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class Final(machine)

Bases: framework.node_builder.State

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(context)
advance(context)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

INITIAL = False
class Initial(machine)

Bases: framework.node_builder.State

INITIAL = True
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class Main(machine)

Bases: framework.node_builder.RegexParser.Parenthesis.Initial

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

__annotations__ = {}
__module__ = 'framework.node_builder'
class QtyState(machine)

Bases: framework.node_builder.State

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class SquareBrackets(machine=None)

Bases: framework.node_builder.StateMachine, framework.node_builder.RegexParser.Group

class AfterRange(machine)

Bases: framework.node_builder.RegexParser.SquareBrackets.Initial

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class BeforeRange(machine)

Bases: framework.node_builder.RegexParser.SquareBrackets.Initial

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class EscapeAfterRange(machine)

Bases: framework.node_builder.State

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class EscapeBeforeRange(machine)

Bases: framework.node_builder.State

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class EscapeMetaSequence(machine)

Bases: framework.node_builder.RegexParser.SquareBrackets.BeforeRange

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
class Final(machine)

Bases: framework.node_builder.State

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

INITIAL = False
class Initial(machine)

Bases: framework.node_builder.State

INITIAL = True
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

class Range(machine)

Bases: framework.node_builder.State

INITIAL = False
__annotations__ = {}
__module__ = 'framework.node_builder'
_run(ctx)
advance(ctx)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

__annotations__ = {}
__module__ = 'framework.node_builder'
__module__ = 'framework.node_builder'
_create_non_terminal_node()
_create_terminal_node(name, type, values=None, alphabet=None, qty=None)
append_to_alphabet(alphabet)
append_to_buffer(str)
append_to_contents(content)
property buffer
flush()
init_specific()

Can be overridden to express additional initializations

parse(inputs, name, charset=2)
reset()
start_new_shape()
start_new_shape_from_buffer()
class framework.node_builder.State(machine)

Bases: object

Represent states at the lower level

__annotations__ = {}
__init__(machine)
Parameters

machine (StateMachine) – state machine where it lives (local context)

__module__ = 'framework.node_builder'
_run(context)
advance(context)

Check transitions using the first non-run character. :param context: root state machine (global context) :type context: StateMachine

Returns

Class of the next state to run (None if we are in a final state)

init_specific()

Can be overridden to express additional initializations

run(context)

Do some actions on the current character. :param context: root state machine (global context) :type context: StateMachine

class framework.node_builder.StateMachine(machine=None)

Bases: framework.node_builder.State

Represent states that contain other states.

__annotations__ = {}
__init__(machine=None)
Parameters

machine (StateMachine) – state machine where it lives (local context)

__module__ = 'framework.node_builder'
_run(context)
property input
run(context)

Do some actions on the current character. :param context: root state machine (global context) :type context: StateMachine

framework.node_builder.initial(cls)
framework.node_builder.register(cls)

13.2.6. framework.value_types module

class framework.value_types.BitField(subfield_limits=None, subfield_sizes=None, subfield_values=None, subfield_val_extremums=None, padding=0, lsb_padding=True, show_padding=False, endian=1, determinist=True, subfield_descs=None, subfield_value_descs=None, defaults=None)

Bases: framework.value_types.VT_Alt

Provide: - either @subfield_limits or @subfield_sizes - either @subfield_values or @subfield_val_extremums

__compute_total_possible_values()

The returned number corresponds to the total number of values that can be returned by the BitField in determinist mode. This number does not cover all the values such a BitField should be able to generate. Refer to get_value() comments for more information.

__init__(subfield_limits=None, subfield_sizes=None, subfield_values=None, subfield_val_extremums=None, padding=0, lsb_padding=True, show_padding=False, endian=1, determinist=True, subfield_descs=None, subfield_value_descs=None, defaults=None)
__module__ = 'framework.value_types'
_check_constraints(sf_values)
_enable_fuzz_mode(fuzz_magnitude=1.0)
_enable_normal_mode()
_encode_bitfield(val)
_read_value_from(blob, size, endian, constraints)

Used by .do_absorb(). Side effect: may change the self.padding_one list.

_reset_idx(reset_idx_inuse=True)
absorb_auto_helper(blob, constraints)
after_enabling_mode()
property bit_length
property byte_length
change_subfield(idx, values=None, extremums=None)

Change the constraints on a given subfield.

Parameters
  • idx (int) – subfield index, from 0 (low significant subfield) to nb_subfields-1 (specific index -1 is used to choose the last subfield).

  • values (list) – new values for the subfield (remove previous value list or remove previous extremums if no value list was used for this subfield)

  • extremums (list) – new extremums for the subfield (remove previous extremums or remove previous value list if no extremums were used for this subfield)

property count_of_possible_values

The returned number corresponds to the total number of values that can be returned by the BitField in determinist mode. This number does not cover all the values such a BitField should be able to generate. Refer to get_value() comments for more information.

do_absorb(blob, constraints, off=0, size=None)
do_cleanup_absorb()

To be called after self.do_absorb() or self.do_revert_absorb()

do_revert_absorb()

If needed should be called just after self.do_absorb().

extend(bitfield, rightside=True)
extend_left(bitfield)
extend_right(bitfield)
get_current_raw_val()
get_current_value()

Provide the current value of the object. Should not change the state of the object except if no current values.

Returns: bytes

get_subfield(idx)
get_value()

In determinist mode, all the values such a BitField should be able to generate are not covered but only a subset of them (i.e., all combinations are not computed). It has been chosen to only keep the value based on the following algorithm: “exhaust each subfield one at a time”.

Rationale: In most cases, computing all combinations does not make sense for fuzzing purpose.

idx_from_desc(sf_desc)
is_compatible(integer, size)
is_exhausted()
make_determinist()
make_private(forget_current_state)
make_random()
padding_one = [0, 1, 3, 7, 15, 31, 63, 127]
pretty_print(max_size=None)
reset_state()
rewind()
set_bitfield(sf_values=None, sf_val_extremums=None, sf_limits=None, sf_sizes=None, sf_descs=None, sf_val_descs=None, sf_defaults=None)
set_default_value(sf_values)
set_size_from_constraints(size=None, encoded_size=None)
set_subfield(idx, val)
Parameters
  • idx – Either an integer which should be the subfield index, from 0 (low significant subfield) to nb_subfields-1 (specific index -1 is used to choose the last subfield). Or a string which should be the description of the field.

  • val (int) – new value for the subfield

update_raw_value(val)
class framework.value_types.Filename(values=None, size=None, min_sz=None, max_sz=None, determinist=True, codec='latin-1', case_sensitive=True, default=None, extra_fuzzy_list=None, absorb_regexp=None, alphabet=None, min_encoded_sz=None, max_encoded_sz=None, encoding_arg=None, values_desc=None, **kwargs)

Bases: framework.value_types.String

__annotations__ = {}
__module__ = 'framework.value_types'
_get_path_depth(path)
_get_path_from_value(value, knowledge)

Returned path always terminates with a separator

linux_prefix = [b'../', b'..\xc0\xaf', b'\xc0\xae\xc0\xae\xc0\xaf']
linux_specific_fnames = [b'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA', b'././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././
././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././
././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././././TEST']
linux_suffix = [b'etc/password']
path_mode = False
subclass_specific_init(specific_suffix=None, uri_parsing=False)

Specific init for Filename

Parameters
  • specific_suffix – List of specific suffixes that will be used for path traversal test cases in addition to the current list.

  • uri_parsing – if the filename is to be consumed as an URI

subclass_specific_test_cases(knowledge, orig_val, fuzz_magnitude=1.0)

To be overwritten by classes that inherit from String if specific test cases need to be implemented

Parameters
  • knowledge

  • orig_val

  • fuzz_magnitude

Returns

list of test cases or None

Return type

list

uri_prefix = [b'%2e%2e%2f', b'%2e%2e/', b'..%2f', b'..%252f', b'.%252e/', b'%2e%2e%5c', b'..%255c', b'..%c0%af', b'%c0%ae%c0%ae%c0%af']
uri_suffix = [b'MARKER.txt']
windows_prefix = [b'..\\', b'\xc0\xae\xc0\xae\\']
windows_specific_fnames = [b'PRN', b'NUL.txt', b'C:\\..\\..\\', b'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.txt']
windows_suffix = [b'Windows\\system.ini']
class framework.value_types.FolderPath(values=None, size=None, min_sz=None, max_sz=None, determinist=True, codec='latin-1', case_sensitive=True, default=None, extra_fuzzy_list=None, absorb_regexp=None, alphabet=None, min_encoded_sz=None, max_encoded_sz=None, encoding_arg=None, values_desc=None, **kwargs)

Bases: framework.value_types.Filename

__annotations__ = {}
__module__ = 'framework.value_types'
subclass_specific_test_cases(knowledge, orig_val, fuzz_magnitude=1.0)

To be overwritten by classes that inherit from String if specific test cases need to be implemented

Parameters
  • knowledge

  • orig_val

  • fuzz_magnitude

Returns

list of test cases or None

Return type

list

class framework.value_types.GSM7bitPacking(values=None, size=None, min_sz=None, max_sz=None, determinist=True, codec='latin-1', case_sensitive=True, default=None, extra_fuzzy_list=None, absorb_regexp=None, alphabet=None, min_encoded_sz=None, max_encoded_sz=None, encoding_arg=None, values_desc=None, **kwargs)

Bases: framework.value_types.String

__annotations__ = {}
__module__ = 'framework.value_types'
_encoder_arg = None
_encoder_cls

alias of framework.encoders.GSM7bitPacking_Enc

init_encoder()
class framework.value_types.GSMPhoneNum(values=None, size=None, min_sz=None, max_sz=None, determinist=True, codec='latin-1', case_sensitive=True, default=None, extra_fuzzy_list=None, absorb_regexp=None, alphabet=None, min_encoded_sz=None, max_encoded_sz=None, encoding_arg=None, values_desc=None, **kwargs)

Bases: framework.value_types.String

__annotations__ = {}
__module__ = 'framework.value_types'
_encoder_arg = None
_encoder_cls

alias of framework.encoders.GSMPhoneNum_Enc

init_encoder()
class framework.value_types.GZIP(values=None, size=None, min_sz=None, max_sz=None, determinist=True, codec='latin-1', case_sensitive=True, default=None, extra_fuzzy_list=None, absorb_regexp=None, alphabet=None, min_encoded_sz=None, max_encoded_sz=None, encoding_arg=None, values_desc=None, **kwargs)

Bases: framework.value_types.String

__annotations__ = {}
__module__ = 'framework.value_types'
_encoder_arg = None
_encoder_cls

alias of framework.encoders.GZIP_Enc

init_encoder()
class framework.value_types.INT(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.VT

Base class to be inherited and not used directly

GEN_MAX_INT = 4294967296
GEN_MIN_INT = -4294967296
__annotations__ = {}
__init__(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)
__module__ = 'framework.value_types'
_check_constraints_and_update(val, no_update=False)
_convert_value(val)
_read_value_from(blob, size)
_unconvert_value(val)
absorb_auto_helper(blob, constraints)
add_specific_fuzzy_vals(vals)
alt_cformat = None
cformat = None
copy_attrs_from(vt)
determinist = True
do_absorb(blob, constraints, off=0, size=None)
do_cleanup_absorb()
do_revert_absorb()

If needed should be called just after self.do_absorb().

endian = None
fuzzy_values = None
get_current_raw_val()
get_current_value()

Provide the current value of the object. Should not change the state of the object except if no current values.

Returns: bytes

get_fuzzed_vt_list()
get_specific_fuzzy_vals()
get_value()

Walk over the values of the object on a per-call basis.

Returns: bytes

is_compatible(integer)
is_exhausted()
is_size_compatible(integer)
make_determinist()
make_private(forget_current_state)
make_random()
maxi = None
maxi_gen = None
mini = None
mini_gen = None
pretty_print(max_size=None)
reset_state()
rewind()
set_default_value(val)
set_size_from_constraints(size=None, encoded_size=None)
size = None
update_raw_value(val)
usable = False
value_space_size = None
class framework.value_types.INT16(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT

__annotations__ = {}
__module__ = 'framework.value_types'
fuzzy_values = [65535, 0, 32768, 32767]
size = 16
usable = False
value_space_size = 65535
class framework.value_types.INT32(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT

__annotations__ = {}
__module__ = 'framework.value_types'
fuzzy_values = [4294967295, 0, 2147483648, 2147483647]
size = 32
usable = False
value_space_size = 4294967295
class framework.value_types.INT64(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT

__annotations__ = {}
__module__ = 'framework.value_types'
fuzzy_values = [18446744073709551615, 0, 9223372036854775808, 9223372036854775807, 1229782938247303441]
size = 64
usable = False
value_space_size = 18446744073709551615
class framework.value_types.INT8(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT

__annotations__ = {}
__module__ = 'framework.value_types'
fuzzy_values = [255, 0, 1, 128, 127]
size = 8
usable = False
value_space_size = 255
class framework.value_types.INT_str(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, base=10, letter_case='upper', min_size=None, reverse=False)

Bases: framework.value_types.INT

__annotations__ = {}
__init__(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, base=10, letter_case='upper', min_size=None, reverse=False)
__module__ = 'framework.value_types'
_convert_value(val)
_prepare_format_str(min_size, base, letter_case)
_read_value_from(blob, size)
_unconvert_value(val)
copy_attrs_from(vt)
endian = 3
fuzzy_values = [0, -1, -4294967296, 4294967295, 4294967296]
get_fuzzed_vt_list()
is_compatible(integer)
pretty_print(max_size=None)
regex_bin = b'-?[01]'
regex_decimal = b'-?\\d'
regex_lower_hex = b'-?[0123456789abcdef]'
regex_octal = b'-?[01234567]'
regex_upper_hex = b'-?[0123456789ABCDEF]'
usable = True
value_space_size = -1
class framework.value_types.SINT16_be(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT16

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = '>H'
cformat = '>h'
endian = 1
maxi = 32767
mini = -32768
usable = True
class framework.value_types.SINT16_le(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT16

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = '<H'
cformat = '<h'
endian = 2
maxi = 32767
mini = -32768
usable = True
class framework.value_types.SINT32_be(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT32

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = '>L'
cformat = '>l'
endian = 1
maxi = 2147483647
mini = -2147483648
usable = True
class framework.value_types.SINT32_le(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT32

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = '<L'
cformat = '<l'
endian = 2
maxi = 2147483647
mini = -2147483648
usable = True
class framework.value_types.SINT64_be(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT64

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = '>Q'
cformat = '>q'
endian = 1
maxi = 9223372036854775807
mini = -9223372036854775808
usable = True
class framework.value_types.SINT64_le(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT64

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = '<Q'
cformat = '<q'
endian = 2
maxi = 9223372036854775807
mini = -9223372036854775808
usable = True
class framework.value_types.SINT8(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT8

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = 'B'
cformat = 'b'
endian = 3
maxi = 127
mini = -128
usable = True
class framework.value_types.String(values=None, size=None, min_sz=None, max_sz=None, determinist=True, codec='latin-1', case_sensitive=True, default=None, extra_fuzzy_list=None, absorb_regexp=None, alphabet=None, min_encoded_sz=None, max_encoded_sz=None, encoding_arg=None, values_desc=None, **kwargs)

Bases: framework.value_types.VT_Alt

Value type that represents a character string.

encoded_string

shall be set to True by any subclass that deals with encoding

Type

bool

subclass_fuzzing_list

attribute to be added by subclasses that provide specific test cases.

Type

list

ASCII = 'ascii'
DEFAULT_MAX_SZ = 10000
LATIN_1 = 'iso8859-1'
UTF16BE = 'utf-16-be'
UTF16LE = 'utf-16-le'
__annotations__ = {}
__init__(values=None, size=None, min_sz=None, max_sz=None, determinist=True, codec='latin-1', case_sensitive=True, default=None, extra_fuzzy_list=None, absorb_regexp=None, alphabet=None, min_encoded_sz=None, max_encoded_sz=None, encoding_arg=None, values_desc=None, **kwargs)

Initialize the String

Parameters
  • values – List of the character strings that are considered valid for the node backed by this String object. The first item of the list is the default value

  • size – Valid character string size for the node backed by this String object.

  • min_sz – Minimum valid size for the character strings for the node backed by this String object. If not set, this parameter will be automatically inferred by looking at the parameter values whether this latter is provided.

  • max_sz – Maximum valid size for the character strings for the node backed by this String object. If not set, this parameter will be automatically inferred by looking at the parameter values whether this latter is provided.

  • determinist – If set to True generated values will be in a deterministic order, otherwise in a random order.

  • codec – codec to use for encoding the string (e.g., ‘latin-1’, ‘utf8’)

  • case_sensitive – If the string is set to be case sensitive then specific additional test cases will be generated in fuzzing mode.

  • default – If not None, this value will be provided by default at first and also each time framework.value_types.String.reset_state() is called.

  • extra_fuzzy_list – During data generation, if this parameter is specified with some specific values, they will be part of the test cases generated by the generic disruptor tTYPE.

  • absorb_regexp (str) – You can specify a regular expression in this parameter as a supplementary constraint for data absorption operation.

  • alphabet – The alphabet to use for generating data, in case no values is provided. Also use during absorption to validate the contents. It is checked if there is no values.

  • values_desc (dict) – Dictionary that maps string values to their descriptions (character strings). Leveraged for display purpose. Even if provided, all values do not need to be described.

  • min_encoded_sz – Only relevant for subclasses that leverage the encoding infrastructure. Enable to provide the minimum legitimate size for an encoded string.

  • max_encoded_sz – Only relevant for subclasses that leverage the encoding infrastructure. Enable to provide the maximum legitimate size for an encoded string.

  • encoding_arg – Only relevant for subclasses that leverage the encoding infrastructure and that allow their encoding scheme to be configured. This parameter is directly provided to String.init_encoding_scheme(). Any object that go through this parameter should support the __copy__ method.

  • kwargs – for subclass usage

__module__ = 'framework.value_types'
__repr__()

Return repr(self).

_bytes2str(val)
_check_alphabet(val, constraints)
_check_constraints(value, force_max_enc_sz, force_min_enc_sz, update_list=False)
_check_constraints_and_update(val)
_check_contents(val, val_sz, constraints)
_check_size_constraints(value)
_enable_fuzz_mode(fuzz_magnitude=1.0)
_enable_normal_mode()
_encoder_cls = None
_encoder_obj = None
_ensure_enc_sizes_consistency()
_populate_values(force_max_enc_sz=False, force_min_enc_sz=False)
_read_value_from(blob, constraints)
_str2bytes(val)
absorb_auto_helper(blob, constraints)
ctrl_char_set = '\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x7f'
decode(val)

Exclusively overloaded by the decorator @from_encoder

do_absorb(blob, constraints, off=0, size=None)

Core function for absorption.

Parameters
  • blob – binary string on which to perform absorption

  • constraints – constraints to comply with

  • off – absorption should start at offset off from blob

  • size – if provided, size relates to the string to be absorbed (which can be encoded)

Returns

value, off, size

do_cleanup_absorb()

To be called after self.do_absorb() or self.do_revert_absorb()

do_revert_absorb()

If needed should be called just after self.do_absorb(). (safe to recall it more than once)

encode(val)

Exclusively overloaded by the decorator @from_encoder

encoded_string = False
encoding_test_cases(current_val, max_sz, min_sz, min_encoded_sz, max_encoded_sz)

To be optionally overloaded by a subclass that deals with encoding in order to provide specific test cases on encoding scheme.

Parameters
  • current_val – the current value (not encoded)

  • max_sz – maximum size for a not encoded string

  • min_sz – minimum size for a not encoded string

  • min_encoded_sz – minimum encoded size for a string

  • max_encoded_sz – maximum encoded size for a string

Returns

the list of encoded test cases

Return type

list

extended_char_set = '\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0¡¢£¤¥¦§¨©ª«¬\xad®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ'
static fuzz_cases_c_strings(knowledge, orig_val, sz, fuzz_magnitude)

Produces test cases relevant for C strings. This method is also used by INT_str().

Parameters
  • knowledge

  • orig_val

  • sz

  • fuzz_magnitude

Returns:

static fuzz_cases_ctrl_chars(knowledge, orig_val, sz, max_sz, codec)

Produces test cases relevant when control characters are interpreted by the consumer. This method is also used by INT_str().

Parameters
  • knowledge

  • orig_val

  • sz

  • max_sz

  • codec

Returns:

static fuzz_cases_letter_case(knowledge, orig_val)

Produces test cases relevant if the described element is case sensitive.

Parameters
  • knowledge

  • orig_val

Returns:

get_current_raw_val(str_form=False)
get_current_value()

Provide the current value of the object. Should not change the state of the object except if no current values.

Returns: bytes

get_value()

Walk over the values of the object on a per-call basis.

Returns: bytes

init_encoder = None
is_exhausted()
make_determinist()
make_private(forget_current_state)
make_random()
non_ctrl_char = ' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0¡¢£¤¥¦§¨©ª«¬\xad®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ'
pretty_print(max_size=None)
printable_char_set = ' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~'
reset_encoder()
reset_state()
rewind()
set_default_value(val)
set_description(values=None, size=None, min_sz=None, max_sz=None, determinist=True, codec='latin-1', case_sensitive=True, default=None, extra_fuzzy_list=None, absorb_regexp=None, alphabet=None, min_encoded_sz=None, max_encoded_sz=None)

@size take precedence over @min_sz and @max_sz

set_size_from_constraints(size=None, encoded_size=None)
subclass_specific_init(**kwargs)

To be overwritten by class that inherits from String if specific init is necessary, for instance new parameters.

Parameters

**kwargs

Returns:

subclass_specific_test_cases(knowledge, orig_val, fuzz_magnitude)

To be overwritten by class that inherits from String if specific test cases need to be implemented

Parameters
  • knowledge

  • orig_val

  • fuzz_magnitude

Returns

list of test cases or None

Return type

list

class framework.value_types.UINT16_be(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT16

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = '>h'
cformat = '>H'
endian = 1
maxi = 65535
mini = 0
usable = True
class framework.value_types.UINT16_le(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT16

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = '<h'
cformat = '<H'
endian = 2
maxi = 65535
mini = 0
usable = True
class framework.value_types.UINT32_be(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT32

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = '>l'
cformat = '>L'
endian = 1
maxi = 4294967295
mini = 0
usable = True
class framework.value_types.UINT32_le(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT32

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = '<l'
cformat = '<L'
endian = 2
maxi = 4294967295
mini = 0
usable = True
class framework.value_types.UINT64_be(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT64

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = '>q'
cformat = '>Q'
endian = 1
maxi = 18446744073709551615
mini = 0
usable = True
class framework.value_types.UINT64_le(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT64

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = '<q'
cformat = '<Q'
endian = 2
maxi = 18446744073709551615
mini = 0
usable = True
class framework.value_types.UINT8(values=None, min=None, max=None, default=None, determinist=True, force_mode=False, fuzz_mode=False, values_desc=None)

Bases: framework.value_types.INT8

__annotations__ = {}
__module__ = 'framework.value_types'
alt_cformat = 'b'
cformat = 'B'
endian = 3
maxi = 255
mini = 0
usable = True
class framework.value_types.VT

Bases: object

Base class to implement Types that are leveraged by typed nodes

BigEndian = 1
LittleEndian = 2
Native = 3
__annotations__ = {}
__module__ = 'framework.value_types'
add_specific_fuzzy_vals(vals)
copy_attrs_from(vt)
enc2struct = {1: '>', 2: '<', 3: '='}
endian = None
get_current_raw_val()
get_current_value()

Provide the current value of the object. Should not change the state of the object except if no current values.

Returns: bytes

get_fuzzed_vt_list()
get_specific_fuzzy_vals()
get_value()

Walk over the values of the object on a per-call basis.

Returns: bytes

is_exhausted()
knowledge_source = None
make_determinist()
make_private(forget_current_state)
make_random()
maxi = None
mini = None
pretty_print(max_size=None)
reset_state()
rewind()
set_default_value(val)
set_size_from_constraints(size=None, encoded_size=None)
class framework.value_types.VT_Alt

Bases: framework.value_types.VT

__annotations__ = {}
__init__()
__module__ = 'framework.value_types'
_enable_fuzz_mode(fuzz_magnitude=1.0)
_enable_normal_mode()
add_specific_fuzzy_vals(vals)
after_enabling_mode()
enable_fuzz_mode(fuzz_magnitude=1.0)
enable_normal_mode()
property fuzz_mode_enabled
get_specific_fuzzy_vals()
switch_mode()
class framework.value_types.Wrapper(values=None, size=None, min_sz=None, max_sz=None, determinist=True, codec='latin-1', case_sensitive=True, default=None, extra_fuzzy_list=None, absorb_regexp=None, alphabet=None, min_encoded_sz=None, max_encoded_sz=None, encoding_arg=None, values_desc=None, **kwargs)

Bases: framework.value_types.String

__annotations__ = {}
__module__ = 'framework.value_types'
_encoder_arg = None
_encoder_cls

alias of framework.encoders.Wrap_Enc

init_encoder()
framework.value_types.from_encoder(encoder_cls, encoding_arg=None)

13.2.7. framework.generic_data_makers module

class framework.generic_data_makers.d_add_data

Bases: framework.tactics_helpers.Disruptor

Add some data within the retrieved input.

__module__ = 'framework.generic_data_makers'
_args_desc = {'after': ('If True, the addition will be done after the selected node. Otherwise, it will be done before.', True, <class 'bool'>), 'atom': ('Name of the atom to add within the retrieved input. It is mutually exclusive with @raw', None, <class 'str'>), 'name': ('If provided, the added node will have this name.', None, <class 'str'>), 'path': ('Graph path to select the node on which the disruptor should apply.', None, <class 'str'>), 'raw': ('Raw value to add within the retrieved input. It is mutually exclusive with @atom.', b'', (<class 'bytes'>, <class 'str'>))}
_modelwalker_user = False
disrupt_data(dm, target, prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.d_call_external_program

Bases: framework.tactics_helpers.Disruptor

Call an external program to deal with the data.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'cmd': ('The external command to execute.', None, (<class 'list'>, <class 'tuple'>, <class 'str'>)), 'file_mode': ('If True the data will be provided through a file to the external program, otherwise it will be provided on the command line directly.', True, <class 'bool'>), 'path': ('Graph path regexp to select nodes on which the disruptor should apply.', None, <class 'str'>)}
_get_cmd()
_modelwalker_user = False
disrupt_data(dm, target, prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.d_call_function

Bases: framework.tactics_helpers.Disruptor

Call the function provided with the first parameter being the Data() object received as input of this disruptor, and optionally with additional parameters if @params is set. The function should return a Data() object.

The signature of the function should be compatible with:

func(data, *args) –> Data()

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'func': ('The function that will be called with a node as its first parameter, and provided optionally with additional parameters if @params is set.', <function <lambda>>, <class 'method'>), 'params': ('Tuple of parameters that will be provided to the function.', None, <class 'tuple'>)}
_modelwalker_user = False
disrupt_data(dm, target, prev_data)
class framework.generic_data_makers.d_corrupt_bits_by_position

Bases: framework.tactics_helpers.Disruptor

Corrupt bit at a specific byte.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'ascii': ('Enforce all outputs to be ascii 7bits.', False, <class 'bool'>), 'idx': ('Byte index to be corrupted (from 1 to data length).', 1, <class 'int'>), 'new_val': ('If provided change the selected byte with the new one.', None, <class 'bytes'>)}
_modelwalker_user = False
disrupt_data(dm, target, prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.d_corrupt_node_bits

Bases: framework.tactics_helpers.Disruptor

Corrupt bits on some nodes of the data model.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'ascii': ('Enforce all outputs to be ascii 7bits.', False, <class 'bool'>), 'nb': ('Apply corruption on @nb Nodes fetched randomly within the data model.', 2, <class 'int'>), 'new_val': ('If provided change the selected byte with the new one.', None, <class 'str'>), 'path': ('Graph path regexp to select nodes on which the disruptor should apply.', None, <class 'str'>)}
_modelwalker_user = False
disrupt_data(dm, target, prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.d_fix_constraints

Bases: framework.tactics_helpers.Disruptor

Fix data constraints.

Release constraints from input data or from only a piece of it (if the parameter path is provided), then recompute them. By constraints we mean every generator (or function) node that may embed constraints between nodes, and every node existence condition.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'clone_node': ('If True the dmaker will always return a copy of the node. (For stateless disruptors dealing with big data it can be useful to set it to False.)', False, <class 'bool'>), 'path': ('Graph path regexp to select nodes on which the disruptor should apply.', None, <class 'str'>)}
_modelwalker_user = False
disrupt_data(dm, target, prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.d_fuzz_model_structure

Bases: framework.tactics_helpers.Disruptor

Disrupt the data model structure (replace ordered sections by unordered ones).

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'path': ('Graph path regexp to select nodes on which the disruptor should apply.', None, <class 'str'>)}
_modelwalker_user = False
disrupt_data(dm, target, prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.d_max_size

Bases: framework.tactics_helpers.Disruptor

Truncate the data (or part of the data) to the provided size.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'path': ('Graph path regexp to select nodes on which the disruptor should apply.', None, <class 'str'>), 'sz': ('Truncate the data (or part of the data) to the provided size.', 10, <class 'int'>)}
_modelwalker_user = False
disrupt_data(dm, target, prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.d_modify_nodes

Bases: framework.tactics_helpers.Disruptor

Perform modifications on the provided data. Two ways are possible:

  • Either the change is performed on the content of the nodes specified by the path parameter with the new value provided, and the optional constraints for the absorption (use node absorption infrastructure);

  • Or the change is performed based on a dictionary provided through the parameter multi_mod

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_add_info(prev_data, n, new_value, status, size)
_args_desc = {'clone_node': ('If True the dmaker will always return a copy of the node. (For stateless disruptors dealing with big data it can be useful to set it to False.)', False, <class 'bool'>), 'constraints': ('Constraints for the absorption of the new value.', AbsNoCsts(), <class 'framework.global_resources.AbsCsts'>), 'multi_mod': ('Dictionary of <path>:<item> pairs or <NodeSemanticsCriteria>:<item> pairs or <NodeInternalsCriteria>:<item> pairs to change multiple nodes with different values. <item> can be either only the new <value> or a tuple (<value>,<abscsts>) if new constraint for absorption is needed', None, <class 'dict'>), 'path': ('Graph path regexp to select nodes on which the disruptor should apply.', None, <class 'str'>), 'sem': ('Semantics to select nodes on which the disruptor should apply.', None, (<class 'str'>, <class 'list'>)), 'unfold': ('Resolve all the generator nodes within the input before performing the @path/@sem research', False, <class 'bool'>), 'value': ('The new value to inject within the data.', b'', <class 'bytes'>)}
_modelwalker_user = False
disrupt_data(dm, target, prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.d_next_node_content

Bases: framework.tactics_helpers.Disruptor

Move to the next content of the nodes from input data or from only a piece of it (if the parameter path is provided). Basically, unfreeze the nodes then freeze them again, which will consequently produce a new data.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'clone_node': ('If True the dmaker will always return a copy of the node. (for stateless disruptors dealing with big data it can be useful to set it to False).', False, <class 'bool'>), 'path': ('Graph path regexp to select nodes on which the disruptor should apply.', None, <class 'str'>), 'recursive': ('Apply the disruptor recursively.', True, <class 'str'>)}
_modelwalker_user = False
disrupt_data(dm, target, prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.d_operate_on_nodes

Bases: framework.tactics_helpers.Disruptor

Perform an operation on the nodes specified by the regexp path. @op is an operation that applies to a node and @params is a tuple containing the parameters that will be provided to @op. If no path is provided, the root node will be used.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_add_info(prev_data, n)
_args_desc = {'clone_node': ('If True the dmaker will always return a copy of the node. (For stateless disruptors dealing with big data it can be useful to set it to False.)', False, <class 'bool'>), 'op': ('The operation to perform on the selected nodes.', <function Node.clear_attr>, <class 'method'>), 'op_ref': ("Predefined operation that can be referenced by name. The current predefined function are: 'unfreeze', 'freeze', 'walk', 'set_qty'. Take precedence over @op if not None.", None, <class 'str'>), 'params': ('Tuple of parameters that will be provided to the operation.', (), <class 'tuple'>), 'path': ('Graph path regexp to select nodes on which the disruptor should apply.', None, <class 'str'>), 'sem': ('Semantics to select nodes on which the disruptor should apply.', None, (<class 'str'>, <class 'list'>))}
_modelwalker_user = False
disrupt_data(dm, target, prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.d_shallow_copy

Bases: framework.tactics_helpers.Disruptor

Shallow copy of the input data, which means: ignore its frozen state during the copy.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {}
_modelwalker_user = False
disrupt_data(dm, target, prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.d_switch_to_alternate_conf

Bases: framework.tactics_helpers.Disruptor

Switch to an alternate configuration.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'conf': ('Change the configuration, with the one provided (by name), of all subnodes fetched by @path, one-by-one. [default value is set dynamically with the first-found existing alternate configuration]', None, <class 'str'>), 'path': ('Graph path regexp to select nodes on which the disruptor should apply.', None, <class 'str'>), 'recursive': ('Does the reachable nodes from the selected ones need also to be changed?', True, <class 'bool'>)}
_modelwalker_user = False
disrupt_data(dm, target, prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.g_generic_pattern

Bases: framework.tactics_helpers.Generator

Generate basic data based on a pattern and different parameters.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'eval': ('The pattern will be evaluated before being used. Note that the evaluation shall result in a byte string.', False, <class 'bool'>), 'pattern': ('Pattern to be used for generating data', b'1234567890', <class 'bytes'>), 'prefix': ('Prefix added to the pattern', b'', <class 'bytes'>), 'size': ('Size of the generated data.', None, <class 'int'>), 'suffix': ('Suffix replacing the end of the pattern', b'', <class 'bytes'>)}
_modelwalker_user = False
generate_data(dm, monitor, target)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.g_population

Bases: framework.tactics_helpers.Generator

Walk through the given population

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'population': ('The population to iterate over.', None, <class 'framework.evolutionary_helpers.Population'>), 'track': ('Keep trace of the changes that occurred on data, generation after generation', False, <class 'bool'>)}
_modelwalker_user = False
generate_data(dm, monitor, target)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.sd_constraint_fuzz

Bases: framework.tactics_helpers.StatefulDisruptor

When the CSP (Constraint Satisfiability Problem) backend is used in the node description, this operator negates the constraints one-by-one and outputs 1 or more samples for each negated constraint.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'clone_node': ('If True, this operator will always return a copy of the node. (for stateless diruptors dealing with big data it can be usefull to set it to False)', True, <class 'bool'>), 'const_idx': ('Index of the constraint to begin with (first index is 1)', 1, <class 'int'>), 'sample_idx': ('Index of the sample for the selected constraint to begin with (first index is 1)', 1, <class 'int'>), 'samples_per_cst': ('Maximum number of samples to output for each negated constraint (-1 means until the end)', -1, <class 'int'>)}
_modelwalker_user = False
_process_next_constraint()
_update_csp()
disrupt_data(dm, target, data)

@data: it is either equal to prev_data the first time disrupt_data() is called by the FMK, or it is an empty data (that is Data()).

set_seed(prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.sd_fuzz_separator_nodes

Bases: framework.tactics_helpers.StatefulDisruptor

Perform alterations on separators (one at a time). Each time a separator is encountered in the provided data, it will be replaced by another separator picked from the ones existing within the provided data.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'clone_node': ('If True, this operator will always return a copy of the node. (for stateless diruptors dealing with big data it can be usefull to set it to False)', True, <class 'bool'>), 'deep': ('When set to True, if a node structure has changed, the modelwalker will reset its walk through the children nodes.', True, <class 'bool'>), 'init': ('Make the model walker ignore all the steps until the provided one', 1, <class 'int'>), 'max_node_tc': ('Maximum number of test cases per node (-1 means until the end). This value is used for nodes with a fuzz weight strictly greater than 1.', -1, <class 'int'>), 'max_steps': ('Maximum number of steps (-1 means until the end)', -1, <class 'int'>), 'min_node_tc': ('Minimum number of test cases per node (-1 means until the end)', -1, <class 'int'>), 'order': ('When set to True, the fuzzing order is strictly guided by the data structure. Otherwise, fuzz weight (if specified in the data model) is used for ordering.', True, <class 'bool'>), 'path': ('Graph path regexp to select nodes on which the disruptor should apply.', None, <class 'str'>), 'sem': ('Semantics to select nodes on which the disruptor should apply.', None, (<class 'str'>, <class 'list'>))}
_modelwalker_user = True
disrupt_data(dm, target, data)

@data: it is either equal to prev_data the first time disrupt_data() is called by the FMK, or it is an empty data (that is Data()).

set_seed(prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.sd_fuzz_typed_nodes

Bases: framework.tactics_helpers.StatefulDisruptor

Perform alterations on typed nodes (one at a time) according to: - their type (e.g., INT, Strings, …) - their attributes (e.g., allowed values, minimum size, …) - knowledge retrieved from the data (e.g., if the input data uses separators, their symbols are leveraged in the fuzzing) - knowledge on the target retrieved from the project file or dynamically from feedback inspection (e.g., C language, GNU/Linux OS, …)

If the input has different shapes (described in non-terminal nodes), this will be taken into account by fuzzing every shape combinations.

Note: this disruptor includes what tSEP does and goes beyond with respect to separators.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'clone_node': ('If True, this operator will always return a copy of the node. (for stateless diruptors dealing with big data it can be usefull to set it to False)', True, <class 'bool'>), 'consider_sibbling_change': ('[EXPERIMENTAL] While walking through terminal nodes, if sibbling nodes are no more the same because of existence condition for instance, walk through the new nodes. (Currently, work only with some specific data model construction.)', False, <class 'bool'>), 'deep': ('When set to True, if a node structure has changed, the modelwalker will reset its walk through the children nodes.', True, <class 'bool'>), 'fix': ("Limit constraints fixing to the nodes related to the currently fuzzed one (only implemented for 'sync_size_with' and 'sync_enc_size_with').", True, <class 'bool'>), 'fix_all': ('For each produced data, reevaluate the constraints on the whole graph.', False, <class 'bool'>), 'full_combinatory': ('When set to True, enable full-combinatory mode for non-terminal nodes. It means that the non-terminal nodes will be customized in "FullCombinatory" mode', False, <class 'bool'>), 'fuzz_mag': ('Order of magnitude for maximum size of some fuzzing test cases.', 1.0, <class 'float'>), 'ign_mutable_attr': ('Walk through all the nodes even if their Mutable attribute is cleared.', False, <class 'bool'>), 'ign_sep': ('When set to True, separators will be ignored if any are defined.', False, <class 'bool'>), 'init': ('Make the model walker ignore all the steps until the provided one', 1, <class 'int'>), 'leaf_determinism': ("If set to 'True', all the typed nodes of the model will be set to determinist mode prior to any fuzzing. If set to 'False', they will be set to random mode. Otherwise, if set to 'None', nothing will be done.", None, <class 'bool'>), 'leaf_fuzz_determinism': ("If set to 'True', each typed node will be fuzzed in a deterministic way. If set to 'False' each typed node will be fuzzed in a random way. 
Otherwise, if it is set to 'None', it will be guided by the data model determinism. Note: this option is complementary to 'determinism' as it acts on the typed node substitutions that occur through this disruptor", True, <class 'bool'>), 'make_determinist': ("If set to 'True', the whole model will be set in determinist mode.Otherwise it will be guided by the data model determinism.", False, <class 'bool'>), 'max_node_tc': ('Maximum number of test cases per node (-1 means until the end). This value is used for nodes with a fuzz weight strictly greater than 1.', -1, <class 'int'>), 'max_steps': ('Maximum number of steps (-1 means until the end)', -1, <class 'int'>), 'min_node_tc': ('Minimum number of test cases per node (-1 means until the end)', -1, <class 'int'>), 'order': ('When set to True, the fuzzing order is strictly guided by the data structure. Otherwise, fuzz weight (if specified in the data model) is used for ordering.', True, <class 'bool'>), 'path': ('Graph path regexp to select nodes on which the disruptor should apply.', None, <class 'str'>), 'sem': ('Semantics to select nodes on which the disruptor should apply.', None, (<class 'str'>, <class 'list'>))}
_modelwalker_user = True
disrupt_data(dm, target, data)

@data: it is either equal to prev_data the first time disrupt_data() is called by the FMK, or it is an empty data (that is Data()).

set_seed(prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.sd_struct_constraints

Bases: framework.tactics_helpers.StatefulDisruptor

Perform constraints alteration (one at a time) on each node that depends on another one regarding its existence, its quantity, its size, …

If deep is set, enable more corruption cases on the data structure, based on the internals of each non-terminal node: - the minimum and maximum amount of the subnodes of each non-terminal nodes - …

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'deep': ('If True, enable corruption of non-terminal node internals', False, <class 'bool'>), 'init': ('Make the model walker ignore all the steps until the provided one.', 1, <class 'int'>), 'max_steps': ('Maximum number of steps (-1 means until the end).', -1, <class 'int'>), 'path': ('Graph path regexp to select nodes on which the disruptor should apply.', None, <class 'str'>), 'sem': ('Semantics to select nodes on which the disruptor should apply.', None, (<class 'str'>, <class 'list'>))}
_modelwalker_user = False
disrupt_data(dm, target, data)

@data: it is either equal to prev_data the first time disrupt_data() is called by the FMK, or it is an empty data (that is Data()).

set_seed(prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.sd_switch_to_alternate_conf

Bases: framework.tactics_helpers.StatefulDisruptor

Switch the configuration of each node, one by one, with the provided alternate configuration.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'clone_node': ('If True, this operator will always return a copy of the node. (for stateless diruptors dealing with big data it can be usefull to set it to False)', True, <class 'bool'>), 'conf': ('Change the configuration, with the one provided (by name), of all nodes reachable from the root, one-by-one. [default value is set dynamically with the first-found existing alternate configuration]', None, (<class 'str'>, <class 'list'>, <class 'tuple'>)), 'init': ('Make the model walker ignore all the steps until the provided one', 1, <class 'int'>), 'max_node_tc': ('Maximum number of test cases per node (-1 means until the end). This value is used for nodes with a fuzz weight strictly greater than 1.', -1, <class 'int'>), 'max_steps': ('Maximum number of steps (-1 means until the end)', -1, <class 'int'>), 'min_node_tc': ('Minimum number of test cases per node (-1 means until the end)', -1, <class 'int'>)}
_modelwalker_user = True
disrupt_data(dm, target, data)

@data: it is either equal to prev_data the first time disrupt_data() is called by the FMK, or it is an empty data (that is Data()).

set_seed(prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.sd_walk_csp_solutions

Bases: framework.tactics_helpers.StatefulDisruptor

When the CSP (Constraint Satisfiability Problem) backend is used in the data description, this operator walks through the solutions of the CSP.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'clone_node': ('If True, this operator will always return a copy of the node. (for stateless diruptors dealing with big data it can be usefull to set it to False)', True, <class 'bool'>), 'init': ('Make the operator ignore all the steps until the provided one', 1, <class 'int'>), 'notify_exhaustion': ('When all the solutions of the CSP have been walked through, the disruptor will notify it if this parameter is set to True.', True, <class 'bool'>)}
_modelwalker_user = False
disrupt_data(dm, target, data)

@data: it is either equal to prev_data the first time disrupt_data() is called by the FMK, or it is an empty data (that is Data()).

set_seed(prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.generic_data_makers.sd_walk_data_model

Bases: framework.tactics_helpers.StatefulDisruptor

Walk through the provided data and for each visited node, iterates over the allowed values (with respect to the data model). Note: no alteration is performed by this disruptor.

__annotations__ = {}
__module__ = 'framework.generic_data_makers'
_args_desc = {'clone_node': ('If True, this operator will always return a copy of the node. (for stateless diruptors dealing with big data it can be usefull to set it to False)', True, <class 'bool'>), 'consider_sibbling_change': ('While walking through terminal nodes, if sibbling nodes are no more the same because of existence condition for instance, walk through the new nodes.', True, <class 'bool'>), 'deep': ('When set to True, if a node structure has changed, the modelwalker will reset its walk through the children nodes.', True, <class 'bool'>), 'fix_all': ('For each produced data, reevaluate the constraints on the whole graph.', True, <class 'bool'>), 'full_combinatory': ('When set to True, enable full-combinatory mode for non-terminal nodes. It means that the non-terminal nodes will be customized in "FullCombinatory" mode', False, <class 'bool'>), 'ign_mutable_attr': ('Walk through all the nodes even if their Mutable attribute is cleared.', True, <class 'bool'>), 'init': ('Make the model walker ignore all the steps until the provided one', 1, <class 'int'>), 'leaf_determinism': ("If set to 'True', all the typed nodes of the model will be set to determinist mode prior to any fuzzing. If set to 'False', they will be set to random mode. Otherwise, if set to 'None', nothing will be done.", None, <class 'bool'>), 'max_node_tc': ('Maximum number of test cases per node (-1 means until the end). This value is used for nodes with a fuzz weight strictly greater than 1.', -1, <class 'int'>), 'max_steps': ('Maximum number of steps (-1 means until the end)', -1, <class 'int'>), 'min_node_tc': ('Minimum number of test cases per node (-1 means until the end)', -1, <class 'int'>), 'nt_only': ('Walk through non-terminal nodes only.', False, <class 'bool'>), 'order': ('When set to True, the walking order is strictly guided by the data structure. 
Otherwise, fuzz weight (if specified in the data model) is used for ordering.', True, <class 'bool'>), 'path': ('Graph path regexp to select nodes on which the disruptor should apply.', None, <class 'str'>), 'sem': ('Semantics to select nodes on which the disruptor should apply.', None, (<class 'str'>, <class 'list'>))}
_modelwalker_user = True
disrupt_data(dm, target, data)

@data: it is either equal to prev_data the first time disrupt_data() is called by the FMK, or it is an empty data (that is Data()).

set_seed(prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

framework.generic_data_makers.truncate_info(info, max_size=60)

13.2.8. framework.target_helpers module

class framework.target_helpers.EmptyTarget(verbose=False)

Bases: framework.target_helpers.Target

__init__(verbose=False)
__module__ = 'framework.target_helpers'
_feedback_mode = 1
send_data(data, from_fmk=False)

To be overloaded.

Note: use data.to_bytes() to get binary data.

Parameters
  • from_fmk (bool) – set to True if the call was performed by the framework itself, otherwise the call comes from user-code (e.g., from a probe or an operator)

  • data (Data) – data container that embeds generally a modeled data accessible through data.content. However if the latter is None, it only embeds the raw data.

send_multiple_data(data_list, from_fmk=False)

Used to send multiple data to the target, or to stimulate several target’s inputs in one shot.

Note: Use data.to_bytes() to get binary data

Parameters
  • from_fmk (bool) – set to True if the call was performed by the framework itself, otherwise the call comes from user-code (e.g., from a Probe or an Operator)

  • data_list (list) – list of data to be sent

supported_feedback_mode = [1, 2]
class framework.target_helpers.Target(name=None, display_feedback=True)

Bases: object

Class abstracting the real target we interact with.

About feedback: Feedback retrieved from a real target has to be provided to the user (i.e., the framework) either after Target.send_data() is called or when Target.collect_unsolicited_feedback() is called.

FBK_WAIT_FULL_TIME = 1
FBK_WAIT_UNTIL_RECV = 2
STATUS_THRESHOLD_FOR_RECOVERY = 0
__annotations__ = {}
__init__(name=None, display_feedback=True)
__module__ = 'framework.target_helpers'
__str__()

Return str(self).

_altered_data_queued = None
_extensions = None
_feedback_mode = None
_last_sending_date = None
_logger = None
_pending_data = None
_pending_data_id = None
_send_data_lock = <unlocked _thread.lock object>
_set_feedback_timeout_specific(fbk_timeout)

Overload this function to handle feedback specifics

Parameters

fbk_timeout (float) – time duration for collecting the feedback

_start(target_desc, tg_id)
_started = None
_stop(target_desc, tg_id)
add_extensions(probe)
add_pending_data(data)
cleanup()

To be overloaded if something needs to be performed after each data emission. It is called after any feedback has been retrieved.

collect_unsolicited_feedback(timeout=0)

If overloaded, it should collect any data from the associated real target that may be sent without solicitation (i.e. without any data sent through it) and make it available through the method .get_feedback()

Parameters

timeout – Maximum delay before returning from feedback collecting

Returns

False if it is not possible, otherwise it should be True

Return type

bool

del_extensions()
display_feedback = False
property extensions
property fbk_wait_full_time_slot_mode
fbk_wait_full_time_slot_msg = 'Wait for the full time slot allocated for feedback retrieval'
property fbk_wait_until_recv_mode
fbk_wait_until_recv_msg = 'Wait until the target has sent something back to us'
feedback_timeout = None
get_description()
static get_fbk_mode_desc(fbk_mode, short=False)
get_feedback()

If overloaded, should return a FeedbackCollector object.

get_last_target_ack_date()

If different from None the return value is used by the FMK to log the date of the target acknowledgment after a message has been sent to it.

[Note: If this method is overloaded, is_feedback_received() should also be]

is_feedback_received()

To be overloaded if the target implements FBK_WAIT_UNTIL_RECV mode, so that it can inform the framework about feedback reception.

is_processed_data_altered()
is_started()
is_target_ready_for_new_data()

To be overloaded if the target needs some time (for conditions to occur) before data can be sent. Note: The FMK busy waits on this method before sending a new data.

name = None
record_info(info)

Can be used by the target to record some information during initialization or anytime it makes sense for your purpose.

Parameters

info (str) – info to be recorded

Returns

None

recover_target()

Implementation of target recovering operations, when a target problem has been detected (i.e. a negative feedback from a probe, an operator or the Target() itself)

Returns

True if the target has been recovered. False otherwise.

Return type

bool

send_data(data, from_fmk=False)

To be overloaded.

Note: use data.to_bytes() to get binary data.

Parameters
  • from_fmk (bool) – set to True if the call was performed by the framework itself, otherwise the call comes from user-code (e.g., from a probe or an operator)

  • data (Data) – data container that embeds generally a modeled data accessible through data.content. However if the latter is None, it only embeds the raw data.

send_data_sync(data: framework.data.Data, from_fmk=False)

Can be used in user-code to send data to the target without interfering with the framework.

Use case example: The user needs to send some message to the target on a regular basis in background. For that purpose, it can quickly define a framework.monitor.Probe that just emits the message by itself.

send_multiple_data(data_list, from_fmk=False)

Used to send multiple data to the target, or to stimulate several target’s inputs in one shot.

Note: Use data.to_bytes() to get binary data

Parameters
  • from_fmk (bool) – set to True if the call was performed by the framework itself, otherwise the call comes from user-code (e.g., from a Probe or an Operator)

  • data_list (list) – list of data to be sent

send_multiple_data_sync(data_list, from_fmk=False)

Can be used in user-code to send data to the target without interfering with the framework.

send_pending_data(from_fmk=False)
sending_delay = 0
set_data_model(dm)
set_feedback_mode(mode)
set_feedback_timeout(fbk_timeout)

To set dynamically the feedback timeout.

Parameters

fbk_timeout (float) – maximum time duration for collecting the feedback

set_logger(logger)
set_project(prj)
set_sending_delay(sending_delay)

Set the sending delay.

Parameters

sending_delay (float) – maximum time (in seconds) taken to send data once the method send_(multiple_)data() has been called.

start()

To be overloaded if needed

stop()

To be overloaded if needed

supported_feedback_mode = [1, 2]
exception framework.target_helpers.TargetError

Bases: Exception

__module__ = 'framework.target_helpers'
exception framework.target_helpers.TargetNotReady

Bases: Exception

__module__ = 'framework.target_helpers'
exception framework.target_helpers.TargetStuck

Bases: Exception

__module__ = 'framework.target_helpers'

13.2.9. framework.targets.network module

class framework.targets.network.NetworkTarget(host='localhost', port=12345, socket_type=(<AddressFamily.AF_INET: 2>, <SocketKind.SOCK_STREAM: 1>), data_semantics='Unknown Semantic', server_mode=False, listen_on_start=True, target_address=None, wait_for_client=True, hold_connection=False, keep_first_client=True, mac_src=None, mac_dst=None, add_eth_header=False, fbk_timeout=2, fbk_mode=1, sending_delay=1, recover_timeout=0.5)

Bases: framework.target_helpers.Target

Generic target class for interacting with a network resource. Can be used directly, but some methods may require to be overloaded to fit your needs.

CHUNK_SZ = 2048
General_Info_ID = 'General Information'
UNKNOWN_SEMANTIC = 'Unknown Semantic'
_INTERNALS_ID = 'NetworkTarget()'
__annotations__ = {}
__init__(host='localhost', port=12345, socket_type=(<AddressFamily.AF_INET: 2>, <SocketKind.SOCK_STREAM: 1>), data_semantics='Unknown Semantic', server_mode=False, listen_on_start=True, target_address=None, wait_for_client=True, hold_connection=False, keep_first_client=True, mac_src=None, mac_dst=None, add_eth_header=False, fbk_timeout=2, fbk_mode=1, sending_delay=1, recover_timeout=0.5)
Parameters
  • host (str) – IP address of the target to connect to, or the IP address on which we will wait for target connecting to us (if server_mode is True). For raw socket type, it should contain the name of the interface.

  • port (int) – Port for communicating with the target, or the port to listen to. For raw socket type, it should contain the protocol ID.

  • socket_type (tuple) – Tuple composed of the socket address family and socket type

  • data_semantics (str) – String of characters that will be used for data routing decision. Useful only when more than one interface are defined. In such case, the data semantics will be checked in order to find a matching interface to which data will be sent. If the data have no semantic, it will be routed to the default first declared interface.

  • server_mode (bool) – If True, the interface will be set in server mode, which means we will wait for the real target to connect to us for sending it data.

  • listen_on_start (bool) – If True, servers will be launched right after the NetworkTarget starts. Otherwise, they will be launched in a lazy mode, meaning just when something is about to be sent through the server mode interface.

  • target_address (tuple) – Used only if server_mode is True and socket type is SOCK_DGRAM. To be used if data has to be sent to a specific address (which is not necessarily the client). It is especially useful if you need to send data before receiving anything. What should be provided is a tuple (host(str), port(int)) associated to the target.

  • wait_for_client (bool) – Used only in server mode (server_mode is True) when the socket type is SOCK_DGRAM and a target_address is provided, or when the socket_type is SOCK_RAW. If set to True, before sending any data, the NetworkTarget will wait for the reception of data (from any client); otherwise it will send data as soon as provided.

  • hold_connection (bool) – If True, we will maintain the connection while sending data to the real target. Otherwise, after each data emission, we close the related socket.

  • keep_first_client (bool) – Used only in server mode (server_mode is True) with SOCK_STREAM socket type. If set to True, the first client that connects to the server will remain the one used for data sending until the target is reloaded. Otherwise, last client information are used. This is not supported for SOCK_DGRAM where the first client will always be the one used for data sending.

  • mac_src (bytes) – Only in conjunction with raw socket. For each data sent through this interface, and if this data contain nodes with the semantic 'mac_src', these nodes will be overwritten (through absorption) with this parameter. If nothing is provided, the MAC address will be retrieved from the interface specified in ‘host’. (works accurately for Linux system).

  • mac_dst (bytes) – Only in conjunction with raw socket. For each data sent through this interface, and if this data contain nodes with the semantic 'mac_dst', these nodes will be overwritten (through absorption) with this parameter.

  • add_eth_header (bool) – Add an ethernet header to the data to send. Only possible in combination with a SOCK_RAW socket type.

  • fbk_timeout (float) – maximum time duration for collecting the feedback

  • sending_delay (float) – maximum time (in seconds) taken to send data once the method send_(multiple_)data() has been called.

  • recover_timeout (int) – Allowed delay for recovering the target. (the recovering can be triggered by the framework if the feedback threads did not terminate before the target health check) Impact the behavior of self.recover_target().

__module__ = 'framework.targets.network'
_before_sending_data(data_list, from_fmk)
_cleanup_state()
_collect_feedback_from(thread_id, fbk_sockets, fbk_ids, fbk_lengths, epobj, fileno2fd, fbk_timeout, flush_received_fbk, pre_fbk)
_connect_to_additional_feedback_sockets()

Connection to additional feedback sockets, if any.

_connect_to_target(host, port, socket_type)
_custom_data_handling_before_emission(data_list)

To be overloaded if you want to perform some operation before sending data_list to the target.

Parameters

data_list (list) – list of Data objects that will be sent to the target.

Returns

the data list to send

Return type

list

_feedback_collect(fbk, ref, error=0)
_feedback_complete()
_feedback_handling(fbk, ref)

To be overloaded if feedback from the target need to be filtered before being logged and/or collected in some way and/or for any other reasons.

Parameters
  • fbk (bytes) – feedback received by the target through a socket referenced by ref.

  • ref (string) – user-defined reference of the socket used to retrieve the feedback.

Returns

a tuple (new_fbk, status) where new_fbk is the feedback you want to log and status is a status that enables you to notify a problem to the framework (should be positive if everything is fine, otherwise should be negative).

Return type

tuple

_feedback_mode = 1
_get_additional_feedback_sockets()

Used if any additional socket to get feedback from has been added by NetworkTarget.add_additional_feedback_interface(), related to the data emitted if needed.

Parameters

data (Data) – the data that will be sent.

Returns

list of sockets, dict of associated ids/names, dict of associated length (a length can be None)

Return type

tuple

_get_data_semantic_key(data)
_get_net_info_from(data)
_get_socket_type(host, port)
_handle_connection_to_fbk_server(clientsocket, address, args, pre_fbk=None)
_handle_target_connection(clientsocket, address, args, pre_fbk=None)
_is_valid_socket_type(socket_type)
_listen_to_target(host, port, socket_type, func, args=None)
_raw_connect_to(host, port, ref_id, socket_type=(<AddressFamily.AF_INET: 2>, <SocketKind.SOCK_STREAM: 1>), chk_size=2048, hold_connection=True)
_raw_listen_to(host, port, ref_id, socket_type=(<AddressFamily.AF_INET: 2>, <SocketKind.SOCK_STREAM: 1>), chk_size=2048, wait_time=None)
_raw_server_main(serversocket, host, port, sock_type, func, sending_event, notif_host_event)
_register_last_ack_date(ack_date)
_send_data(sockets, data_refs, fbk_timeout, from_fmk, pre_fbk=None)
_server_main(serversocket, host, port, func)
_start_fbk_collector(fbk_sockets, fbk_ids, fbk_lengths, epobj, fileno2fd, pre_fbk=None, timeout=None, flush_received_fbk=False)
add_additional_feedback_interface(host, port, socket_type=(<AddressFamily.AF_INET: 2>, <SocketKind.SOCK_STREAM: 1>), fbk_id=None, fbk_length=None, server_mode=False, wait_time=None)

Allows to register an additional socket to get feedback from. Connection is attempted when the target starts, that is when NetworkTarget.start() is called.

cleanup()

To be overloaded if something needs to be performed after each data emission. It is called after any feedback has been retrieved.

collect_unsolicited_feedback(timeout=0)

If overloaded, it should collect any data from the associated real target that may be sent without solicitation (i.e. without any data sent through it) and make it available through the method .get_feedback()

Parameters

timeout – Maximum delay before returning from feedback collecting

Returns

False if it is not possible, otherwise it should be True

Return type

bool

connect_to(host, port, ref_id, socket_type=(<AddressFamily.AF_INET: 2>, <SocketKind.SOCK_STREAM: 1>), chk_size=2048, hold_connection=True)

Used for collecting feedback from the target while it is already started.

get_description()
get_feedback()

If overloaded, should return a FeedbackCollector object.

get_last_target_ack_date()

If different from None the return value is used by the FMK to log the date of the target acknowledgment after a message has been sent to it.

[Note: If this method is overloaded, is_feedback_received() should also be]

initialize()

To be overloaded if some initial setup for the target is necessary.

is_feedback_received()

To be overloaded if the target implements FBK_WAIT_UNTIL_RECV mode, so that it can inform the framework about feedback reception.

is_target_ready_for_new_data()

To be overloaded if the target needs some time (for conditions to occur) before data can be sent. Note: The FMK busy waits on this method before sending a new data.

listen_to(host, port, ref_id, socket_type=(<AddressFamily.AF_INET: 2>, <SocketKind.SOCK_STREAM: 1>), chk_size=2048, wait_time=None, hold_connection=True)

Used for collecting feedback from the target while it is already started.

recover_target()

Implementation of target recovering operations, when a target problem has been detected (i.e. a negative feedback from a probe, an operator or the Target() itself)

Returns

True if the target has been recovered. False otherwise.

Return type

bool

register_new_interface(host, port, socket_type, data_semantics, server_mode=False, target_address=None, wait_for_client=True, hold_connection=False, keep_first_client=True, mac_src=None, mac_dst=None, add_eth_header=False)
remove_all_dynamic_interfaces()
remove_dynamic_interface(host, port)
send_data(data, from_fmk=False)

To be overloaded.

Note: use data.to_bytes() to get binary data.

Parameters
  • from_fmk (bool) – set to True if the call was performed by the framework itself, otherwise the call comes from user-code (e.g., from a probe or an operator)

  • data (Data) – data container that embeds generally a modeled data accessible through data.content. However if the latter is None, it only embeds the raw data.

send_multiple_data(data_list, from_fmk=False)

Used to send multiple data to the target, or to stimulate several target’s inputs in one shot.

Note: Use data.to_bytes() to get binary data

Parameters
  • from_fmk (bool) – set to True if the call was performed by the framework itself, otherwise the call comes from user-code (e.g., from a Probe or an Operator)

  • data_list (list) – list of data to be sent

set_timeout(fbk_timeout, sending_delay)

Set the time duration for feedback gathering and the sending delay above which we give up: - sending data to the target (client mode) - waiting for client connections before sending data to them (server mode)

Parameters
  • fbk_timeout – time duration for feedback gathering (in seconds)

  • sending_delay – sending delay (in seconds)

start()

To be overloaded if needed

stop()

To be overloaded if needed

supported_feedback_mode = [1, 2]
terminate()

To be overloaded if some cleanup is necessary for stopping the target.

13.2.10. framework.targets.local module

class framework.targets.local.LocalTarget(target_path=None, pre_args=None, post_args=None, tmpfile_ext='.bin', send_via_stdin=False, send_via_cmdline=False, error_samples=None, error_parsing_func=<function LocalTarget.<lambda>>)

Bases: framework.target_helpers.Target

__annotations__ = {}
__init__(target_path=None, pre_args=None, post_args=None, tmpfile_ext='.bin', send_via_stdin=False, send_via_cmdline=False, error_samples=None, error_parsing_func=<function LocalTarget.<lambda>>)
__module__ = 'framework.targets.local'
_before_sending_data()
_feedback_mode = 2
cleanup()

To be overloaded if something needs to be performed after each data emission. It is called after any feedback has been retrieved.

get_description()
get_feedback(timeout=0.2)

If overloaded, should return a FeedbackCollector object.

get_post_args()
get_pre_args()
get_target_path()
initialize()

To be overloaded if some initial setup for the target is necessary.

send_data(data, from_fmk=False)

To be overloaded.

Note: use data.to_bytes() to get binary data.

Parameters
  • from_fmk (bool) – set to True if the call was performed by the framework itself, otherwise the call comes from user-code (e.g., from a probe or an operator)

  • data (Data) – data container that embeds generally a modeled data accessible through data.content. However if the latter is None, it only embeds the raw data.

set_post_args(post_args)
set_pre_args(pre_args)
set_target_path(target_path)
set_tmp_file_extension(tmpfile_ext)
start()

To be overloaded if needed

stop()

To be overloaded if needed

supported_feedback_mode = [2]
terminate()

To be overloaded if some cleanup is necessary for stopping the target.

13.2.11. framework.targets.sim module

13.2.12. framework.targets.ssh module

class framework.targets.ssh.SSHTarget(target_addr='localhost', port=12345, bind_address=None, username=None, password=None, pkey_path=None, pkey_password=None, proxy_jump_addr=None, proxy_jump_bind_addr=None, proxy_jump_port=None, proxy_jump_username=None, proxy_jump_password=None, proxy_jump_pkey_path=None, proxy_jump_pkey_password=None, targeted_command=None, file_parameter_path=None, fbk_timeout=0.5, read_stdout=True, read_stderr=True, char_mapping=False, get_pty=False, ref=None)

Bases: framework.target_helpers.Target

ASK_PASSWORD = 20
NO_PASSWORD = 10
STATUS_THRESHOLD_FOR_RECOVERY = -2
__annotations__ = {}
__init__(target_addr='localhost', port=12345, bind_address=None, username=None, password=None, pkey_path=None, pkey_password=None, proxy_jump_addr=None, proxy_jump_bind_addr=None, proxy_jump_port=None, proxy_jump_username=None, proxy_jump_password=None, proxy_jump_pkey_path=None, proxy_jump_pkey_password=None, targeted_command=None, file_parameter_path=None, fbk_timeout=0.5, read_stdout=True, read_stderr=True, char_mapping=False, get_pty=False, ref=None)

This generic target enables you to interact with a remote target requiring an SSH connection.

Parameters
  • target_addr – IP address to reach the SSH server

  • port – port on which the SSH server listen to.

  • bind_address – source address for communication.

  • username – username to use for the connection.

  • password – (optional) password related to the username. Could also be the special value SSHTarget.ASK_PASSWORD that will prompt the user for the password at the time of connection.

  • pkey_path – (optional) path to the private key related to the username (if no password provided).

  • pkey_password – (optional) if the private key is encrypted, this parameter can be either the password to decrypt it, or the special value SSHTarget.ASK_PASSWORD that will prompt the user for the password at the time of connection. If the private key is not encrypted, then this parameter should be set to SSHTarget.NO_PASSWORD

  • proxy_jump_addr – If a proxy jump has to be done before reaching the target, this parameter should be provided with the proxy address to connect with.

  • proxy_jump_bind_addr – internal address of the proxy to communicate with the target.

  • proxy_jump_port – port on which the SSH server of the proxy listen to.

  • proxy_jump_username – username to use for the connection with the proxy.

  • proxy_jump_password – (optional) password related to the username. Could also be the special value SSHTarget.ASK_PASSWORD that will prompt the user for the password at the time of connection.

  • proxy_jump_pkey_path – (optional) path to the private key related to the username.

  • proxy_jump_pkey_password – (optional) if the private key is encrypted, this parameter can be either the password to decrypt it, or the special value SSHTarget.ASK_PASSWORD that will prompt the user for the password at the time of connection. If the private key is not encrypted, then this parameter should be set to SSHTarget.NO_PASSWORD.

  • targeted_command – If not None, it should be a format string taking one argument that will be automatically filled either with the data to be sent or with @file_parameter_path if it is not None (meaning the data have to be provided through a file).

  • file_parameter_path – If data should be provided to the targeted command through a file, then this parameter should provide the remote path where the data to be sent will be first copied into (otherwise it should remain equal to None). It will be provided as a parameter of @targeted_command.

  • fbk_timeout – delay for the framework to wait before it requests feedback from us.

  • read_stdout (bool) – If True, collect as feedback what the executed command will write in stdout.

  • read_stderr (bool) – If True, collect as feedback what the executed command will write in stderr.

  • char_mapping (dict) – If provided, specific characters in the payload will be replaced based on it.

  • get_pty (bool) – Request a pseudo-terminal from the server.

  • ref (str) – Reference for the target. Used for description only.

__module__ = 'framework.targets.ssh'
_set_feedback_timeout_specific(fbk_timeout)

Overload this function to handle feedback specifics

Parameters

fbk_timeout (float) – time duration for collecting the feedback

get_description()
get_last_target_ack_date()

If different from None the return value is used by the FMK to log the date of the target acknowledgment after a message has been sent to it.

[Note: If this method is overloaded, is_feedback_received() should also be]

is_feedback_received()

To be overloaded if the target implements FBK_WAIT_UNTIL_RECV mode, so that it can inform the framework about feedback reception.

recover_target()

Implementation of target recovering operations, when a target problem has been detected (i.e. a negative feedback from a probe, an operator or the Target() itself)

Returns

True if the target has been recovered. False otherwise.

Return type

bool

send_data(data, from_fmk=False)

To be overloaded.

Note: use data.to_bytes() to get binary data.

Parameters
  • from_fmk (bool) – set to True if the call was performed by the framework itself, otherwise the call comes from user-code (e.g., from a probe or an operator)

  • data (Data) – data container that embeds generally a modeled data accessible through data.content. However if the latter is None, it only embeds the raw data.

start()

To be overloaded if needed

stop()

To be overloaded if needed

13.2.13. framework.targets.printer module

class framework.targets.printer.PrinterTarget(tmpfile_ext)

Bases: framework.target_helpers.Target

__annotations__ = {}
__init__(tmpfile_ext)
__module__ = 'framework.targets.printer'
_feedback_mode = None
get_description()
get_printer_name()
get_target_ip()
get_target_port()
send_data(data, from_fmk=False)

To be overloaded.

Note: use data.to_bytes() to get binary data.

Parameters
  • from_fmk (bool) – set to True if the call was performed by the framework itself, otherwise the call comes from user-code (e.g., from a probe or an operator)

  • data (Data) – data container that embeds generally a modeled data accessible through data.content. However if the latter is None, it only embeds the raw data.

set_printer_name(printer_name)
set_target_ip(target_ip)
set_target_port(target_port)
set_tmp_file_extension(tmpfile_ext)
start()

To be overloaded if needed

supported_feedback_mode = []

13.2.14. framework.targets.debug module

exception framework.targets.debug.IncorrectTargetError

Bases: Exception

__module__ = 'framework.targets.debug'
exception framework.targets.debug.ShmemMappingError

Bases: Exception

__module__ = 'framework.targets.debug'
class framework.targets.debug.TestTarget(name=None, recover_ratio=100, fbk_samples=None, repeat_input=False, fbk_timeout=0.05, shmem_mode=False, shmem_timeout=10)

Bases: framework.target_helpers.Target

__annotations__ = {}
__init__(name=None, recover_ratio=100, fbk_samples=None, repeat_input=False, fbk_timeout=0.05, shmem_mode=False, shmem_timeout=10)
__module__ = 'framework.targets.debug'
_collect_fbk_loop()
_feedback_mode = 1
_forward_data()
_handle_fbk(data)
_last_ack_date = None
_map_input_shmem()
add_binding(target)
add_feedback_sources(*targets)
consumer_start = 6
consumer_stop = 15
data_start = 16
dlen_format = '>L'
dlen_start = 0
dlen_stop = 4
get_consumer_idx()
get_feedback()

If overloaded, should return a FeedbackCollector object.

get_last_target_ack_date()

If different from None the return value is used by the FMK to log the date of the target acknowledgment after a message has been sent to it.

[Note: If this method is overloaded, is_feedback_received() should also be]

is_feedback_received()

To be overloaded if the target implements FBK_WAIT_UNTIL_RECV mode, so that it can inform the framework about feedback reception.

is_target_ready_for_new_data()

To be overloaded if the target needs some time (for conditions to occur) before data can be sent. Note: The FMK busy waits on this method before sending a new data.

max_consumer = 10
meta_data_size = 16
producer_status_idx = 5
recover_target()

Implementation of target recovering operations, when a target problem has been detected (i.e. a negative feedback from a probe, an operator or the Target() itself)

Returns

True if the target has been recovered. False otherwise.

Return type

bool

send_data(data, from_fmk=False)

To be overloaded.

Note: use data.to_bytes() to get binary data.

Parameters
  • from_fmk (bool) – set to True if the call was performed by the framework itself, otherwise the call comes from user-code (e.g., from a probe or an operator)

  • data (Data) – data container that embeds generally a modeled data accessible through data.content. However if the latter is None, it only embeds the raw data.

send_multiple_data(data_list, from_fmk=False)

Used to send multiple data to the target, or to stimulate several target’s inputs in one shot.

Note: Use data.to_bytes() to get binary data

Parameters
  • from_fmk (bool) – set to True if the call was performed by the framework itself, otherwise the call comes from user-code (e.g., from a Probe or an Operator)

  • data_list (list) – list of data to be sent

set_control_delay(delay)
set_control_over(*test_targets, feedback_filter=<function TestTarget.<lambda>>)
shmem_size = 4096
start()

To be overloaded if needed

stop()

To be overloaded if needed

supported_feedback_mode = [2, 1]

13.2.15. framework.project module

class framework.project.Project(enable_fbk_processing=True, wkspace_enabled=True, wkspace_size=1000, wkspace_free_slot_ratio_when_full=0.5, fmkdb_enabled=True, default_fbk_timeout=None, default_fbk_mode=None, default_sending_delay=None, default_burst_value=None)

Bases: object

__init__(enable_fbk_processing=True, wkspace_enabled=True, wkspace_size=1000, wkspace_free_slot_ratio_when_full=0.5, fmkdb_enabled=True, default_fbk_timeout=None, default_fbk_mode=None, default_sending_delay=None, default_burst_value=None)
Parameters
  • enable_fbk_processing – enable or disable the execution of feedback handlers, if any are set in the project.

  • wkspace_enabled – If set to True, enable the framework workspace that stores the generated data.

  • wkspace_size – Maximum number of data that can be stored in the workspace.

  • wkspace_free_slot_ratio_when_full – when the workspace is full, provide the ratio of the workspace size that will be used as the amount of entries to free in the workspace.

  • fmkdb_enabled – If set to True, the fmkDB will be used. Otherwise, no DB transactions will occur and thus the fmkDB won’t be filled during the session.

  • default_fbk_timeout – If not None, when the project will be run, this value will be used to initialize the feedback timeout of all the targets

  • default_fbk_mode – If not None, when the project will be run, this value will be used to initialize the feedback mode of all the targets

  • default_sending_delay – If not None, when the project will be run, this value will be used to initialize the delay that is applied by the framework between each data sending.

  • default_burst_value – If not None, when the project will be run, this value will be used to initialize the burst value of the framework (number of data that can be sent in burst before a delay is applied).

__module__ = 'framework.project'
_feedback_processing()

core function of the feedback processing thread

add_knowledge(*info)
default_dm = None
disable_feedback_handlers()
enable_feedback_handlers()
estimate_last_data_impact_uniqueness()
get_operator(name)
get_operators()
get_probes()
property knowledge_source
map_targets_to_scenario(scenario, target_mapping)
name = None
notify_data_sending(data_list, timestamp, target)
register_evolutionary_processes(*processes)
register_feedback_handler(fbk_handler)
register_operator(name, obj)
register_probe(probe, blocking=False)
register_scenarios(*scenarios)
reset_knowledge()
reset_target_mappings()
set_data_model(dm)
set_exportable_fmk_ops(fmkops)
set_logger(logger)
set_monitor(monitor)
set_targets(targets)
share_knowlegde_source()
start()
stop()
trigger_feedback_handlers(source, timestamp, content, status)
wkspace_enabled = None
wkspace_free_slot_ratio_when_full = None
wkspace_size = None

13.2.16. framework.operator_helpers module

class framework.operator_helpers.LastInstruction

Bases: object

RecordData = 1
__init__()
__module__ = 'framework.operator_helpers'
get_comments()
get_operator_feedback()
get_operator_status()
get_timestamp()
is_instruction_set(name)
set_comments(comments)
set_instruction(name)
set_operator_feedback(info)
set_operator_status(status_code)
class framework.operator_helpers.Operation

Bases: object

CleanupDMakers = 3
Exportable = 2
Stop = 1
__init__()
__module__ = 'framework.operator_helpers'
add_instruction(actions, seed=None, tg_ids=None)
get_instructions()
is_flag_set(name)
set_flag(name)
set_status(status)
class framework.operator_helpers.Operator

Bases: object

__module__ = 'framework.operator_helpers'
__str__()

Return str(self).

_args_desc = None
_start(fmk_ops, dm, monitor, target, logger, user_input)
do_after_all(fmk_ops, dm, monitor, target, logger)

This action is executed after data has been sent to the target AND that all blocking probes have returned. BUT just before data is logged.

Returns

Last-minute instructions you request fuddly to perform.

Return type

LastInstruction

plan_next_operation(fmk_ops, dm, monitor, target, logger, fmk_feedback)

Shall return a Operation object that contains the operations that you want fuddly to perform.

Returns

Operation you want fuddly to perform.

Return type

Operation

start(fmk_ops, dm, monitor, target, logger, user_input)

To be overloaded if specific initialization code is needed. Shall return True if setup has succeeded, otherwise shall return False.

stop(fmk_ops, dm, monitor, target, logger)

To be overloaded if specific termination code is needed.

framework.operator_helpers.operator(prj, args=None)

13.2.17. framework.logger module

class framework.logger.Logger(name=None, prefix='', record_data=False, explicit_data_recording=False, export_raw_data=True, term_display_limit=800, enable_term_display=True, enable_file_logging=False, highlight_marked_nodes=False)

Bases: object

The Logger is used for keeping the history of the communication with the Target. The methods are used by the framework, but can also be leveraged by an Operator.

FLUSH_API = 1
PRETTY_PRINT_API = 3
PRINT_CONSOLE_API = 4
WRITE_API = 2
__init__(name=None, prefix='', record_data=False, explicit_data_recording=False, export_raw_data=True, term_display_limit=800, enable_term_display=True, enable_file_logging=False, highlight_marked_nodes=False)
Parameters
  • name (str) – Name to be used in the log filenames. If not specified, the name of the project in which the logger is embedded will be used.

  • record_data (bool) – If True, each emitted data will be stored in a specific file within exported_data/.

  • explicit_data_recording (bool) – Used for logging outcomes further to an Operator instruction. If True, the operator would have to state explicitly if it wants the just emitted data to be recorded. Such notification is possible when the framework calls its method framework.operator_helpers.Operator.do_after_all(), where the Operator can take its decision after the observation of the target feedback and/or probes outputs.

  • export_raw_data (bool) – If True, will log the data as it is, without trying to interpret it as human readable text.

  • term_display_limit (int) – maximum amount of characters to display on the terminal at once. If this threshold is overrun, the message to print on the console will be truncated.

  • enable_term_display (bool) – If True, information will be displayed on the terminal

  • prefix (str) – prefix to use for printing on the console.

  • enable_file_logging (bool) – If True, file logging will be enabled.

  • highlight_marked_nodes (bool) – If True, alteration performed by compatible disruptors will be highlighted. Only possible if export_raw_data is False, as this option forces data interpretation.

__module__ = 'framework.logger'
__str__()

Return str(self).

_encode_target_feedback(feedback)
_export_data_func(data, suffix='')
_handle_binary_content(content, raw=False)
_log_feedback(source, content, status_code, timestamp, record=True)
_log_handler()
_print_console(msg, nl_before=True, nl_after=False, rgb=None, style=None, raw_limit=None, limit_output=True, no_format_mode=False)
_process_target_feedback(feedback)
_stop_log_handler()
collect_feedback(content, status_code=None, subref=None, fbk_src=None)

Used within the scope of the Logger feedback-collector infrastructure. If your target implements the interface Target.get_feedback(), there is no need to use this infrastructure.

To be called by the target each time feedback needs to be registered.

Parameters
  • content – feedback record

  • status_code (int) – should be negative for error

  • subref (str) – specific reference to distinguish internal log sources within the same caller

  • fbk_src – [optional] source object of the feedback

commit_data_table_entry(group_id, prj_name)
flush()
fmkDB = None
log_async_data(data_list: Union[framework.data.Data, List[framework.data.Data], Tuple[framework.data.Data]], sent_date, target_ref, prj_name, current_data_id)
log_collected_feedback(preamble=None, epilogue=None)

Used within the scope of the Logger feedback-collector feature. If your target implements the interface Target.get_feedback(), there is no need to use this infrastructure.

It allows you to retrieve the collected feedback that has been populated by the target (through calls to Logger.collect_feedback()).

Parameters
  • preamble (str) – prefix added to each collected feedback

  • epilogue (str) – suffix added to each collected feedback

Returns

True if target feedback has been collected through logger infrastructure

Logger.collect_feedback(), False otherwise.

Return type

bool

log_comment(comment)
log_data(data, verbose=False)
log_data_info(data_info, dmaker_type, data_maker_name)
log_disruptor_info(dmaker_type, name, user_input)
log_dmaker_step(num)
log_error(err_msg)
log_fmk_info(info, nl_before=False, nl_after=False, rgb=6750207, data_id=None, do_show=True, do_record=True, delay_recording=False)
log_generator_info(dmaker_type, name, user_input, data_id=None, disabled=False)
log_info(info)
log_operator_feedback(operator, content, status_code, timestamp)
log_probe_feedback(probe, content, status_code, timestamp, related_tg=None)
log_target_ack_date()
log_target_feedback_from(source, content, status_code, timestamp, preamble=None, epilogue=None)
pretty_print_data(data: framework.data.Data, fd=None, raw_limit: Optional[int] = None)
print_console(msg, nl_before=True, nl_after=False, rgb=None, style=None, raw_limit=None, limit_output=True, no_format_mode=False)
reset_current_state()
set_external_display(disp)
set_target_ack_date(tg_ref, date)
shall_record()
start()
start_new_log_entry(preamble='')
stop()
wait_for_sync()
write(data: str)

13.2.18. framework.monitor module

exception framework.monitor.AddExistingProbeToMonitorError(probe_name)

Bases: Exception

Raised when a probe is being added a second time in a monitor

__init__(probe_name)
__module__ = 'framework.monitor'
property probe_name
class framework.monitor.BlockingProbeUser(probe, after_target_feedback_retrieval)

Bases: framework.monitor.ProbeUser

__init__(probe, after_target_feedback_retrieval)
__module__ = 'framework.monitor'
_clear()

Clear all events

_notify_armed()
_notify_status_retrieved()
_run(*args, **kwargs)
_wait_for_data_ready()

Wait on a request to arm

Returns

True if the arm event happened, False if a stop was asked

or an error was signaled

Return type

bool

_wait_for_fmk_sync()

Wait on a blocking event: data sent or timeout

Returns

True if the blocking event happened, False if a stop was

asked or an error was signaled

Return type

bool

property after_target_feedback_retrieval
notify_blocking()
notify_data_ready()
notify_error()

Informs the probe of an error

stop()
wait_until_armed(timeout=None)
wait_until_ready(timeout=None)
class framework.monitor.Monitor

Bases: object

__init__()
__module__ = 'framework.monitor'
_get_probe_ref(probe)
_wait_for_specific_probes(probe_user_class, probe_user_wait_method, probes=None, timeout=None)

Wait for probes to trigger a specific event

Parameters
  • probe_user_class (ProbeUser) – probe_user class that defines the method.

  • probe_user_wait_method (method) – name of the probe_user’s method that will be used to wait.

  • probes (list of ProbeUser) – probes to wait for. If None all probes will be concerned.

  • timeout (float) – maximum time to wait for in seconds.

add_probe(probe, blocking=False, after_target_feedback_retrieval=False)
configure_probe(probe, *args)
disable_hooks()
enable_hooks()
get_probe_delay(probe)
get_probe_status(probe)
get_probes_names()
is_probe_launched(probe)
is_probe_stuck(probe)
is_target_ok()
iter_probes()
notify_data_sending_event()
notify_error()
notify_imminent_data_sending()
notify_target_feedback_retrieval()
set_data_model(dm)
set_fmk_ops(fmk_ops)
set_logger(logger)
set_probe_delay(probe, delay)
set_targets(targets)
start()
start_probe(probe, related_tg=None)
stop()
stop_all_probes()
stop_probe(probe)
property target_status
wait_for_probe_initialization()
wait_for_probe_status_retrieval()
class framework.monitor.Probe(delay=1.0)

Bases: object

__init__(delay=1.0)
__module__ = 'framework.monitor'
__str__()

Return str(self).

_start(dm, target, logger)
_stop(dm, target, logger)
arm(dm, target, logger)

Only used by blocking probes. Called by the framework just before sending a data.

Parameters
  • dm – the current data model

  • target – the current target

  • logger – the current logger

configure(*args)

(Optional method) To be overloaded with any signature that fits your needs Could be called by user code through framework.monitor.Monitor.configure_probe() Use case example is to call it from an framework.operator_helpers.Operator

Parameters

*args – anything that fits your needs

property delay
main(dm, target, logger)

To be overloaded by user-code

In the case of a basic probe, this method will be called in loop following a period specified within the associated project file.

In the case of a blocking probe, this method will be called by the framework just after having sent a data (or a batch of data).

Parameters
  • dm – the current data model

  • target – the current target

  • logger – the current logger

Returns

negative status if something is wrong

Return type

ProbeStatus

reset()

To be overloaded by user-code (if needed).

Called each time the probe status is retrieved by the framework (through Monitor.get_probe_status()). Useful especially for periodic probes that may need to be reset after each data sending.

Note: shall be stateless and reentrant.

start(dm, target, logger)

Probe initialization

Returns

may return a status or None

Return type

ProbeStatus

property status
stop(dm, target, logger)
class framework.monitor.ProbeCmd

Bases: framework.monitor.Probe

Generic probe that enables you to execute shell commands and retrieve the output.

The monitoring can be done through different backend (e.g., SSH_Backend, Serial_Backend).

backend

backend to be used (e.g., SSH_Backend).

Type

framework.comm_backends.Backend

init_command

ssh command to execute at init

Type

str

recurrent_command

ssh command to execute at each probing

Type

str

__annotations__ = {}
__init__()
__module__ = 'framework.monitor'
backend = None
init_command = None
main(dm, target, logger)

To be overloaded by user-code

In the case of a basic probe, this method will be called in loop following a period specified within the associated project file.

In the case of a blocking probe, this method will be called by the framework just after having sent a data (or a batch of data).

Parameters
  • dm – the current data model

  • target – the current target

  • logger – the current logger

Returns

negative status if something is wrong

Return type

ProbeStatus

recurrent_command = None
start(dm, target, logger)

Probe initialization

Returns

may return a status or None

Return type

ProbeStatus

stop(dm, target, logger)
class framework.monitor.ProbeMem

Bases: framework.monitor.Probe

Generic probe that enables you to monitor the process memory (RSS…) consumption. It can be done by specifying a threshold and/or a tolerance ratio.

The monitoring can be done through different backend (e.g., SSH_Backend, Serial_Backend).

backend

backend to be used (e.g., SSH_Backend).

Type

framework.comm_backends.Backend

process_name

name of the process to monitor.

Type

str

threshold

memory (RSS) threshold that the monitored process should not exceed. (dimension should be the same as what is provided by the ps command of the system under test)

Type

int

tolerance

tolerance expressed in percentage of the memory (RSS) the process was using at the beginning of the monitoring (or after each time the tolerance has been exceeded).

Type

int

command_pattern

format string for the ssh command. ‘{0:s}’ refers to the process name.

Type

str

__annotations__ = {}
__init__()
__module__ = 'framework.monitor'
_get_mem()
backend = None
command_pattern = 'ps -e -orss,comm | grep {0:s}'
main(dm, target, logger)

To be overloaded by user-code

In the case of a basic probe, this method will be called in loop following a period specified within the associated project file.

In the case of a blocking probe, this method will be called by the framework just after having sent a data (or a batch of data).

Parameters
  • dm – the current data model

  • target – the current target

  • logger – the current logger

Returns

negative status if something is wrong

Return type

ProbeStatus

process_name = None
reset()

To be overloaded by user-code (if needed).

Called each time the probe status is retrieved by the framework (through Monitor.get_probe_status()). Useful especially for periodic probe that may need to be reset after each data sending.

Note: shall be stateless and reentrant.

start(dm, target, logger)

Probe initialization

Returns

may return a status or None

Return type

ProbeStatus

stop(dm, target, logger)
threshold = None
tolerance = 2
class framework.monitor.ProbePID

Bases: framework.monitor.Probe

Generic probe that enables you to monitor a process PID.

The monitoring can be done through different backend (e.g., SSH_Backend, Serial_Backend).

backend

backend to be used (e.g., SSH_Backend).

Type

framework.comm_backends.Backend

process_name

name of the process to monitor.

Type

str

max_attempts

maximum number of attempts for getting the process ID.

Type

int

delay_between_attempts

delay in seconds between each attempt.

Type

float

delay

delay before retrieving the process PID.

Type

float

command_pattern

format string for the ssh command. ‘{0:s}’ refers to the process name.

Type

str

__annotations__ = {}
__init__()
__module__ = 'framework.monitor'
_get_pid(logger)
backend = None
command_pattern = 'pgrep {0:s}'
delay = 0.5
delay_between_attempts = 0.1
main(dm, target, logger)

To be overloaded by user-code

In the case of a basic probe, this method will be called in loop following a period specified within the associated project file.

In the case of a blocking probe, this method will be called by the framework just after having sent a data (or a batch of data).

Parameters
  • dm – the current data model

  • target – the current target

  • logger – the current logger

Returns

negative status if something is wrong

Return type

ProbeStatus

max_attempts = 10
process_name = None
start(dm, target, logger)

Probe initialization

Returns

may return a status or None

Return type

ProbeStatus

stop(dm, target, logger)
class framework.monitor.ProbeStatus(status=None, info=None)

Bases: object

__init__(status=None, info=None)
__module__ = 'framework.monitor'
get_private_info()
get_timestamp()
set_private_info(pv)
set_timestamp()
property value
exception framework.monitor.ProbeTimeoutError(probe_name, timeout, blocking_methods=None)

Bases: Exception

Raised when a probe is considered stuck

__init__(probe_name, timeout, blocking_methods=None)
Parameters
  • probe_name (str) – name of the probe where the timeout occurred

  • timeout (float) – time the probe waited before its timeout

  • blocking_methods (list of str) – list of probe_methods where the timeout may have happened

__module__ = 'framework.monitor'
property blocking_methods
property probe_name
property timeout
class framework.monitor.ProbeUser(probe)

Bases: object

__annotations__ = {}
__init__(probe)
__module__ = 'framework.monitor'
_clear()

Clear all events

_go_on()
_handle_exception(context)
_notify_probe_started()
_run(*args, **kwargs)
_wait(delay)
_wait_for_probe(event, timeout=None)

Wait for the probe to trigger a specific event

get_probe_delay()
get_probe_status()
is_alive()
is_stuck()

Tells if the probe has to be considered stuck by the monitor: i.e. if it is really stuck or if its stop was not acknowledged

join(timeout=None)
property probe
probe_init_timeout = 15.0
set_probe_delay(delay)
start(*args, **kwargs)
stop()
timeout = 5.0
wait_for_probe_init(timeout=None)
framework.monitor.blocking_probe(project, after_target_feedback_retrieval=False)
framework.monitor.probe(project)

13.2.19. framework.comm_backends module

class framework.comm_backends.Backend(codec='latin_1')

Bases: object

__init__(codec='latin_1')
Parameters

codec (str) – codec used by the monitored system to answer.

__module__ = 'framework.comm_backends'
_exec_command(cmd)
Parameters

cmd – command to execute through the communication channel

Returns: list of file descriptors (e.g., stdout, stderr)

_start()
_stop()
exec_command(cmd)
read_output(chan_desc)
Parameters

chan_desc – object returned by Backend.exec_command() that enables to gather output data

Returns

data retrieved through the communication channel

Return type

bytes

start()
stop()
exception framework.comm_backends.BackendError(msg, status=- 1)

Bases: Exception

__init__(msg, status=- 1)
__module__ = 'framework.comm_backends'
class framework.comm_backends.SSH_Backend(target_addr='localhost', port=22, bind_address=None, username=None, password=None, pkey_path=None, pkey_password=10, proxy_jump_addr=None, proxy_jump_bind_addr=None, proxy_jump_port=None, proxy_jump_username=None, proxy_jump_password=None, proxy_jump_pkey_path=None, proxy_jump_pkey_password=10, codec='latin-1', timeout=None, get_pty=False)

Bases: framework.comm_backends.Backend

ASK_PASSWORD = 20

Backend to execute commands through an SSH connection.

NO_PASSWORD = 10
__annotations__ = {}
__init__(target_addr='localhost', port=22, bind_address=None, username=None, password=None, pkey_path=None, pkey_password=10, proxy_jump_addr=None, proxy_jump_bind_addr=None, proxy_jump_port=None, proxy_jump_username=None, proxy_jump_password=None, proxy_jump_pkey_path=None, proxy_jump_pkey_password=10, codec='latin-1', timeout=None, get_pty=False)
Parameters
  • target_addr (str) – IP of the SSH server.

  • port (int) – port of the SSH server.

  • username (str) – username to connect with.

  • password (str) – (optional) password related to the username. Could also be the special value SSHTarget.ASK_PASSWORD that will prompt the user for the password at the time of connection.

  • pkey_path (str) – (optional) path of the private key (if no password provided).

  • pkey_password – (optional) if the private key is encrypted, this parameter can be either the password to decrypt it, or the special value SSHTarget.ASK_PASSWORD that will prompt the user for the password at the time of connection. If the private key is not encrypted, then this parameter should be set to SSHTarget.NO_PASSWORD

  • proxy_jump_addr – If a proxy jump has to be done before reaching the target, this parameter should be provided with the proxy address to connect with.

  • proxy_jump_bind_addr – internal address of the proxy used to communicate with the target.

  • proxy_jump_port – port on which the SSH server of the proxy listen to.

  • proxy_jump_username – username to use for the connection with the proxy.

  • proxy_jump_password – (optional) password related to the username. Could also be the special value SSHTarget.ASK_PASSWORD that will prompt the user for the password at the time of connection.

  • proxy_jump_pkey_path – (optional) path to the private key related to the username.

  • proxy_jump_pkey_password – (optional) if the private key is encrypted, this parameter can be either the password to decrypt it, or the special value SSHTarget.ASK_PASSWORD that will prompt the user for the password at the time of connection. If the private key is not encrypted, then this parameter should be set to SSHTarget.NO_PASSWORD.

  • codec (str) – codec used by the monitored system to answer.

  • timeout (float) – timeout on blocking read/write operations. None disables timeouts on socket operations

  • get_pty (bool) – Request a pseudo-terminal from the server. It implies that processes executed from this ssh session will be attached to the pty and will be killed once the session is closed. (Otherwise they could remain on the server.)

__module__ = 'framework.comm_backends'
static _create_pkey(pkey_path, pkey_password, prompt='PKey Password:')
_exec_command(cmd)
Parameters

cmd – command to execute through the communication channel

Returns: list of file descriptors (e.g., stdout, stderr)

_read_fd(fdesc)
_start()
_stop()
read_output(chan_desc)
Parameters

chan_desc – object returned by Backend.exec_command() that enables to gather output data

Returns

data retrieved through the communication channel

Return type

bytes

read_stderr(chan_desc)
read_stdout(chan_desc)
set_timeout(timeout)
class framework.comm_backends.Serial_Backend(serial_port, baudrate=115200, bytesize=8, parity='N', stopbits=1, xonxoff=False, rtscts=False, dsrdtr=False, username=None, password=None, slowness_factor=5, cmd_notfound=b'command not found', codec='latin-1')

Bases: framework.comm_backends.Backend

Backend to execute command through a serial line.

__annotations__ = {}
__init__(serial_port, baudrate=115200, bytesize=8, parity='N', stopbits=1, xonxoff=False, rtscts=False, dsrdtr=False, username=None, password=None, slowness_factor=5, cmd_notfound=b'command not found', codec='latin-1')
Parameters
  • serial_port (str) – path to the tty device file. (e.g., ‘/dev/ttyUSB0’)

  • baudrate (int) – baud rate of the serial line.

  • bytesize (int) – number of data bits. (5, 6, 7, or 8)

  • parity (str) – parity checking. (‘N’, ‘O’, ‘E’, ‘M’, or ‘S’)

  • stopbits (int) – number of stop bits. (1, 1.5 or 2)

  • xonxoff (bool) – enable software flow control.

  • rtscts (bool) – enable hardware (RTS/CTS) flow control.

  • dsrdtr (bool) – enable hardware (DSR/DTR) flow control.

  • username (str) – username to connect with. If None, no authentication step will be attempted.

  • password (str) – password related to the username.

  • slowness_factor (int) – characterize the slowness of the monitored system. The scale goes from 1 (fastest) to 10 (slowest). This factor is a base metric to compute the time to wait for the authentication step to terminate (if username and password parameter are provided) and other operations involving to wait for the monitored system.

  • cmd_notfound (bytes) – pattern used to detect if the command does not exist on the monitored system.

  • codec (str) – codec used to send/receive information through the serial line

__module__ = 'framework.comm_backends'
_exec_command(cmd)
Parameters

cmd – command to execute through the communication channel

Returns: list of file descriptors (e.g., stdout, stderr)

_read_serial(serial_chan, duration)
_start()
_stop()
read_output(chan_desc)
Parameters

chan_desc – object returned by Backend.exec_command() that enables to gather output data

Returns

data retrieved through the communication channel

Return type

bytes

class framework.comm_backends.Shell_Backend(timeout=None, codec='latin-1')

Bases: framework.comm_backends.Backend

Backend to execute shell commands locally

__annotations__ = {}
__init__(timeout=None, codec='latin-1')
Parameters
  • timeout (float) – timeout in seconds for reading the result of the command

  • codec (str) – codec used by the monitored system to answer.

__module__ = 'framework.comm_backends'
_exec_command(cmd)
Parameters

cmd – command to execute through the communication channel

Returns: list of file descriptors (e.g., stdout, stderr)

_start()
_stop()
read_output(chan_desc)
Parameters

chan_desc – object returned by Backend.exec_command() that enables to gather output data

Returns

data retrieved through the communication channel

Return type

bytes

13.2.20. framework.tactics_helpers module

class framework.tactics_helpers.DataMaker

Bases: object

__annotations__ = {}
__init__()
__module__ = 'framework.tactics_helpers'
_args_desc = None
_modelwalker_user = False
knowledge_source = None
property modelwalker_user
related_dm_name = None
set_exportable_fmk_ops(fmkops)
class framework.tactics_helpers.DataMakerAttr

Bases: object

Active = 1
Controller = 2
HandOver = 3
NeedSeed = 5
SetupRequired = 4
__module__ = 'framework.tactics_helpers'
class framework.tactics_helpers.Disruptor

Bases: framework.tactics_helpers.DataMaker

__annotations__ = {}
__init__()
__module__ = 'framework.tactics_helpers'
_cleanup()
_setup(dm, user_input)
cleanup(fmkops)

–> Specific code

clear_attr(name)
disrupt_data(dm, target, prev_data)
is_attr_set(name)
set_attr(name)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.tactics_helpers.DynGenerator

Bases: framework.tactics_helpers.Generator

__annotations__ = {}
__module__ = 'framework.tactics_helpers'
_args_desc = {'determinist': ("Make the data model determinist if set to 'True', random if set to 'False', or do nothing if set to 'None'", None, <class 'bool'>), 'finite': ('Make the data model finite', False, <class 'bool'>), 'freeze': ('Freeze the generated node.', False, <class 'bool'>), 'leaf_determinism': ("If set to 'True', all the typed nodes of the model will be set to determinist mode prior to any fuzzing. If set to 'False', they will be set to random mode. Otherwise, if set to 'None', nothing will be done.", None, <class 'bool'>), 'min_def': ("Set the default quantity of all the nodes to the defined minimum quantity if this parameter is set to 'True', or maximum quantity if set to 'False'. Otherwise if set to 'None', nothing is done.", None, <class 'bool'>), 'resolve_csp': ('Resolve any CSP if any', True, <class 'bool'>)}
data_id = ''
generate_data(dm, monitor, target)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.tactics_helpers.DynGeneratorFromScenario

Bases: framework.tactics_helpers.Generator

__annotations__ = {}
__handle_transition_callbacks(hook, feedback=None)
__module__ = 'framework.tactics_helpers'
_alter_data_step()
_alter_transition_conditions()
_args_desc = {'cond_fuzz': ('For each scenario step having guarded transitions, a new scenario is created where transition conditions are inverted. [compatible with ignore_timing]', False, <class 'bool'>), 'data_fuzz': ('For each scenario step that generates data, a new scenario is created where the data generated by the step is fuzzed.', False, <class 'bool'>), 'graph': ('Display the scenario and highlight the current step each time the generator is called.', False, <class 'bool'>), 'graph_format': ('Format to be used for displaying the scenario (e.g., xdot, pdf, png).', 'xdot', <class 'str'>), 'ignore_timing': ('For each scenario step enforcing a timing constraint, a new scenario is created where any timeout conditions are removed (i.e., set to 0 second). [compatible with cond_fuzz]', False, <class 'bool'>), 'init': ("Used in combination with 'data_fuzz', 'cond_fuzz', or 'ignore_timing'. Make the generator begin with the Nth corrupted scenario (where N is provided through this parameter).", 0, <class 'int'>), 'reset': ("If set, scenarios created by 'data_fuzz', 'cond_fuzz', or 'ignore_timing' will reinitialize the scenario after each corruption case, without waiting for the normal continuation of the scenario.", True, <class 'bool'>), 'stutter': ("For each scenario step that generates data, a new scenario is created where the step is altered to stutter 'stutter_max' times, meaning that data-sending steps would be triggered 'stutter_max' times.", False, <class 'bool'>), 'stutter_max': ("The number of times a step will stutter [to be used with 'stutter']", 2, <class 'int'>)}
_callback_dispatcher_after_fbk(fbk)

This callback is always called by the framework It allows for a NoDataStep to perform actions (trigger periodic data, tasks, …)

_callback_dispatcher_after_sending()
_callback_dispatcher_before_sending_step1()
_callback_dispatcher_before_sending_step2()
_callback_dispatcher_final()
_check_data_fuzz_completion_cbk(env, step)
_cleanup_walking_attrs()
_make_step_stutter()
_stutter_cbk(env, current_step, next_step)
cleanup(fmkops)

–> Specific code

generate_data(dm, monitor, target)
graph_scenario(fmt, select_current=False)
property produced_seed
scenario = None
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.tactics_helpers.Generator

Bases: framework.tactics_helpers.DataMaker

__annotations__ = {}
__init__()
__module__ = 'framework.tactics_helpers'
_cleanup()
_setup(dm, user_input)
cleanup(fmkops)

–> Specific code

clear_attr(name)
generate_data(dm, monitor, target)
is_attr_set(name)
need_reset()
produced_seed = None
set_attr(name)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.tactics_helpers.StatefulDisruptor

Bases: framework.tactics_helpers.DataMaker

__annotations__ = {}
__init__()
__module__ = 'framework.tactics_helpers'
_cleanup()
_set_seed(prev_data)
_setup(dm, user_input)
cleanup(fmkops)

–> Specific code

clear_attr(name)
disrupt_data(dm, target, data)

@data: it is either equal to prev_data the first time disrupt_data() is called by the FMK, or it is an empty data (that is, Data()).

handover()
is_attr_set(name)
set_attr(name)
set_seed(prev_data)
setup(dm, user_input)

–> Specific code return True if setup has succeeded, otherwise return False

class framework.tactics_helpers.Tactics

Bases: object

__clear_dmaker_clones(dmaker, dmaker_clones)
__clone_dmaker(dmaker, dmaker_clones, dmaker_type, new_dmaker_type, dmaker_name=None, register_func=None)
__get_random_data_maker(dict_var, dmaker_type, total_weight, valid)
__init__()
__module__ = 'framework.tactics_helpers'
__register_new_data_maker(dict_var, name, obj, weight, dmaker_type, valid)
__set_data_maker_weight(dict_var, dmaker_type, name, weight)
clear_disruptor_clones()
clear_generator_clones()
clone_disruptor(dmaker_type, new_dmaker_type=None, dmaker_name=None)
clone_generator(dmaker_type, new_dmaker_type=None, dmaker_name=None)
property disruptor_types
disruptors_info()
property generator_types
generators_info()
get_datatype_total_weight(dmaker_type)
get_disruptor_name(dmaker_type, obj)
get_disruptor_obj(dmaker_type, name)
get_disruptor_validness(dmaker_type, name)
get_disruptor_weight(dmaker_type, name)
get_disruptors_list(dmaker_type)
get_dmaker_type_total_weight(dmaker_type)
get_generator_name(dmaker_type, obj)
get_generator_obj(dmaker_type, name)
get_generator_validness(dmaker_type, name)
get_generator_weight(dmaker_type, name)
get_generators_list(dmaker_type)
get_info_from_obj(obj)
get_random_disruptor(dmaker_type, valid)
get_random_generator(dmaker_type, valid)
print_disruptor(dmaker_type, disruptor_name)
print_generator(dmaker_type, generator_name)
register_new_disruptor(name, obj, weight, dmaker_type, valid=False)
register_new_generator(name, obj, weight, dmaker_type, valid=False)
register_scenarios(*scenarios)
static scenario_ref_from(scenario)
set_additional_info(fmkops, related_dm=None)
set_disruptor_weight(dmaker_type, name, weight)
set_generator_weight(dmaker_type, name, weight)
framework.tactics_helpers._handle_user_inputs(dmaker, user_input)
framework.tactics_helpers._restore_dmaker_internals(dmaker)
framework.tactics_helpers._user_input_conformity(self, user_input, _args_desc)
framework.tactics_helpers.disruptor(st, dtype, weight=1, valid=False, args=None, modelwalker_user=False)
class framework.tactics_helpers.dyn_generator(name, bases, attrs)

Bases: type

__annotations__ = {}
__init__(name, bases, attrs)
__module__ = 'framework.tactics_helpers'
data_id = ''
class framework.tactics_helpers.dyn_generator_from_scenario(name, bases, attrs)

Bases: type

__annotations__ = {}
__module__ = 'framework.tactics_helpers'
static __new__(cls, name, bases, attrs)
scenario = None
framework.tactics_helpers.generator(st, gtype, weight=1, valid=False, args=None, modelwalker_user=False)
framework.tactics_helpers.modelwalker_inputs_handling_helper(dmaker)

13.2.21. framework.fuzzing_primitives module

class framework.fuzzing_primitives.AltConfConsumer(max_runs_per_node=-1, min_runs_per_node=-1, respect_order=True, fuzz_magnitude=1.0, fix_constraints=False, ignore_mutable_attr=False, consider_side_effects_on_sibbling=False, **kwargs)

Bases: framework.fuzzing_primitives.NodeConsumerStub

Note: save_node()/restore_node() are not overloaded although the default implementation can trigger overhead, because for some cases copying the Elt is the better option (e.g., for alternate conf on nonterm nodes, that reuse same subnodes over the various confs).

__module__ = 'framework.fuzzing_primitives'
consume_node(node)

Use this method to modify/alter or just read information on @node. This function will be called for each node that satisfies the criteria. (to be implemented according to the implementation of need_reset())

Return True to say that you have correctly consumed the node. Return False, if despite your current criteria for node interest, you are in fact not interested

init_specific(**kwargs)
need_reset(node)
recover_node(node)

Generic way to recover a node

save_node(node)

Generic way to save a node (can impact performance)

still_interested_by(node)
wait_for_exhaustion(node)
  • return -1 to wait until exhaustion

  • return 0 to stop node iteration after consumption (and yielding a value once)

  • return N-1 to stop iteration after at most N steps (or before if exhaustion triggers)

class framework.fuzzing_primitives.BasicVisitor(max_runs_per_node=-1, min_runs_per_node=-1, respect_order=True, fuzz_magnitude=1.0, fix_constraints=False, ignore_mutable_attr=False, consider_side_effects_on_sibbling=False, **kwargs)

Bases: framework.fuzzing_primitives.NodeConsumerStub

__annotations__ = {}
__module__ = 'framework.fuzzing_primitives'
consume_node(node)

Use this method to modify/alter or just read information on @node. This function will be called for each node that satisfies the criteria. (to be implemented according to the implementation of need_reset())

Return True to say that you have correctly consumed the node. Return False, if despite your current criteria for node interest, you are in fact not interested

init_specific(reset_when_change=True)
need_reset(node)
recover_node(node)

Generic way to recover a node

reset_state()

Called by the ModelWalker to reinitialize the disruptor.

save_node(node)

Generic way to save a node (can impact performance)

wait_for_exhaustion(node)
  • return -1 to wait until exhaustion

  • return 0 to stop node iteration after consumption (and yielding a value once)

  • return N-1 to stop iteration after at most N steps (or before if exhaustion triggers)

class framework.fuzzing_primitives.ModelWalker(root_node, node_consumer, make_determinist=False, make_random=False, max_steps=-1, initial_step=1)

Bases: object

We walk through all states of the model and give opportunity to the Consumer to act on each node, and to be involved in the walking process in some extents.

The first rule of the walking process is to step up to a node exhaustion (which means that the consume_node() method of the Consumer won’t be called in-between)

Note: the change of a non-terminal node does not reset the indirect parents (just the direct parent), otherwise it could lead to a combinatorial explosion, with limited interest…

__init__(root_node, node_consumer, make_determinist=False, make_random=False, max_steps=-1, initial_step=1)
__iter__()
__module__ = 'framework.fuzzing_primitives'
_do_reset(node, consumer)
node_consumer_helper(node, structure_has_changed, consumed_nodes, parent_node, consumer)
set_consumer(node_consumer)
walk_graph_rec(node_list, structure_has_changed, consumed_nodes, parent_node, consumer)
class framework.fuzzing_primitives.NodeConsumerStub(max_runs_per_node=-1, min_runs_per_node=-1, respect_order=True, fuzz_magnitude=1.0, fix_constraints=False, ignore_mutable_attr=False, consider_side_effects_on_sibbling=False, **kwargs)

Bases: object

__annotations__ = {}
__init__(max_runs_per_node=-1, min_runs_per_node=-1, respect_order=True, fuzz_magnitude=1.0, fix_constraints=False, ignore_mutable_attr=False, consider_side_effects_on_sibbling=False, **kwargs)
__module__ = 'framework.fuzzing_primitives'
consume_node(node)

Use this method to modify/alter or just read information on @node. This function will be called for each node that satisfies the criteria. (to be implemented according to the implementation of need_reset())

Return True to say that you have correctly consumed the node. Return False, if despite your current criteria for node interest, you are in fact not interested

do_after_reset(node)
init_specific(**kwargs)
interested_by(node)
max_nb_runs_for(node)
need_reset(node)
preload(root_node)

Called by the ModelWalker when it initializes

Parameters

root_node – Root node of the modeled data

Returns: None

recover_node(node)

Generic way to recover a node

reset_state()

Called by the ModelWalker to reinitialize the disruptor.

save_node(node)

Generic way to save a node (can impact performance)

set_node_interest(internals_criteria=None, semantics_criteria=None, owned_confs=None, path_regexp=None, conf=None)

@conf: criteria are applied for the provided conf if not None, otherwise current_conf is used. Note: when all is None, NodeConsumer is interested by every node (that is, interested_by() always returns True)

still_interested_by(node)
wait_for_exhaustion(node)
  • return -1 to wait until exhaustion

  • return 0 to stop node iteration after consumption (and yielding a value once)

  • return N-1 to stop iteration after at most N steps (or before if exhaustion triggers)

class framework.fuzzing_primitives.NonTermVisitor(max_runs_per_node=-1, min_runs_per_node=-1, respect_order=True, fuzz_magnitude=1.0, fix_constraints=False, ignore_mutable_attr=False, consider_side_effects_on_sibbling=False, **kwargs)

Bases: framework.fuzzing_primitives.BasicVisitor

__annotations__ = {}
__module__ = 'framework.fuzzing_primitives'
consume_node(node)

Use this method to modify/alter or just read information on @node. This function will be called for each node that satisfies the criteria. (to be implemented according to the implementation of need_reset())

Return True to say that you have correctly consumed the node. Return False, if despite your current criteria for node interest, you are in fact not interested

init_specific(reset_when_change=True)
need_reset(node)
still_interested_by(node)
wait_for_exhaustion(node)
  • return -1 to wait until exhaustion

  • return 0 to stop node iteration after consumption (and yielding a value once)

  • return N-1 to stop iteration after at most N steps (or before if exhaustion triggers)

class framework.fuzzing_primitives.SeparatorDisruption(max_runs_per_node=-1, min_runs_per_node=-1, respect_order=True, fuzz_magnitude=1.0, fix_constraints=False, ignore_mutable_attr=False, consider_side_effects_on_sibbling=False, **kwargs)

Bases: framework.fuzzing_primitives.NodeConsumerStub

__annotations__ = {}
__module__ = 'framework.fuzzing_primitives'
consume_node(node)

Use this method to modify/alter or just read information on @node. This function will be called for each node that satisfies the criteria. (to be implemented according to the implementation of need_reset())

Return True to say that you have correctly consumed the node. Return False, if despite your current criteria for node interest, you are in fact not interested

init_specific(separators=None)
class framework.fuzzing_primitives.TypedNodeDisruption(max_runs_per_node=-1, min_runs_per_node=-1, respect_order=True, fuzz_magnitude=1.0, fix_constraints=False, ignore_mutable_attr=False, consider_side_effects_on_sibbling=False, **kwargs)

Bases: framework.fuzzing_primitives.NodeConsumerStub

__annotations__ = {}
__module__ = 'framework.fuzzing_primitives'
_add_separator_cases(vt_node)
_populate_fuzzy_vt_list(vt_node, fuzz_magnitude)
consume_node(node)

Use this method to modify/alter or just read information on @node. This function will be called for each node that satisfies the criteria. (to be implemented according to the implementation of need_reset())

Return True to say that you have correctly consumed the node. Return False, if despite your current criteria for node interest, you are in fact not interested

init_specific(ignore_separator=False, determinist=True)
need_reset(node)
preload(root_node)

Called by the ModelWalker when it initializes

Parameters

root_node – Root node of the modeled data

Returns: None

recover_node(node)

Generic way to recover a node

save_node(node)

Generic way to save a node (can impact performance)

still_interested_by(node)
framework.fuzzing_primitives.fuzz_data_tree(top_node, paths_regexp=None)

13.2.22. framework.encoders module

class framework.encoders.BitInverter_Enc(encoding_arg=None)

Bases: framework.encoders.Encoder, framework.encoders.EncoderAbsorptionHelper

__module__ = 'framework.encoders'
decode(byte_str)

To be overloaded. (Should be stateless.)

Raise EncoderUnrecognizedValue if decoding is not possible.

Parameters

val (bytes) – the encoded value

Returns

the decoded value

Return type

bytes

encode(byte_str)

To be overloaded. (Should be stateless.)

Parameters

val (bytes) – the value

Returns

the encoded value

Return type

bytes

how_much_can_be_consumed_from(blob: bytes)

To be overloaded. (Should be stateless.)

Try to determine the end of what is decodable from the beginning of blob. blob is always starting with the encoded part. Otherwise, the blob is to be considered not compliant with the Encoder. Raise EncoderUnrecognizedValue in this case.

If the blob is decodable but the correct size cannot be determined raise EncoderSizeNotFound.

Parameters

blob (bytes) – the encoded binary string

Returns

the size of what is decodable from the beginning of blob.

Return type

int

class framework.encoders.BitReverse_Enc(encoding_arg=None)

Bases: framework.encoders.Encoder

__annotations__ = {}
__module__ = 'framework.encoders'
_reverse_bits(x, nb_bits=8)

Reverse bits order of x

decode(val)

To be overloaded. (Should be stateless.)

Raise EncoderUnrecognizedValue if decoding is not possible.

Parameters

val (bytes) – the encoded value

Returns

the decoded value

Return type

bytes

encode(val)

To be overloaded. (Should be stateless.)

Parameters

val (bytes) – the value

Returns

the encoded value

Return type

bytes

class framework.encoders.Encoder(encoding_arg=None)

Bases: object

__annotations__ = {}
__copy__()
__init__(encoding_arg=None)
__module__ = 'framework.encoders'
decode(val)

To be overloaded. (Should be stateless.)

Raise EncoderUnrecognizedValue if decoding is not possible.

Parameters

val (bytes) – the encoded value

Returns

the decoded value

Return type

bytes

encode(val)

To be overloaded. (Should be stateless.)

Parameters

val (bytes) – the value

Returns

the encoded value

Return type

bytes

init_encoding_scheme(arg)

To be optionally overloaded by a subclass that deals with encoding, if encoding needs to be initialized in some way. (called at init and in String.reset())

Parameters

arg – provided through the encoding_arg parameter of the String constructor

reset()
class framework.encoders.EncoderAbsorptionHelper

Bases: object

Helper used in the context of absorption

__annotations__ = {}
__module__ = 'framework.encoders'
how_much_can_be_consumed_from(blob: bytes)

To be overloaded. (Should be stateless.)

Try to determine the end of what is decodable from the beginning of blob. blob is always starting with the encoded part. Otherwise, the blob is to be considered not compliant with the Encoder. Raise EncoderUnrecognizedValue in this case.

If the blob is decodable but the correct size cannot be determined raise EncoderSizeNotFound.

Parameters

blob (bytes) – the encoded binary string

Returns

the size of what is decodable from the beginning of blob.

Return type

int

exception framework.encoders.EncoderSizeNotFoundError

Bases: Exception

__module__ = 'framework.encoders'
exception framework.encoders.EncoderUnrecognizedValueError

Bases: Exception

__module__ = 'framework.encoders'
class framework.encoders.GSM7bitPacking_Enc(encoding_arg=None)

Bases: framework.encoders.Encoder

__annotations__ = {}
__module__ = 'framework.encoders'
decode(msg)

To be overloaded. (Should be stateless.)

Raise EncoderUnrecognizedValue if decoding is not possible.

Parameters

val (bytes) – the encoded value

Returns

the decoded value

Return type

bytes

encode(msg)

To be overloaded. (Should be stateless.)

Parameters

val (bytes) – the value

Returns

the encoded value

Return type

bytes

class framework.encoders.GSMPhoneNum_Enc(encoding_arg=None)

Bases: framework.encoders.Encoder

__annotations__ = {}
__module__ = 'framework.encoders'
decode(msg)

To be overloaded. (Should be stateless.)

Raise EncoderUnrecognizedValue if decoding is not possible.

Parameters

val (bytes) – the encoded value

Returns

the decoded value

Return type

bytes

encode(msg)

To be overloaded. (Should be stateless.)

Parameters

val (bytes) – the value

Returns

the encoded value

Return type

bytes

class framework.encoders.GZIP_Enc(encoding_arg=None)

Bases: framework.encoders.Encoder

__annotations__ = {}
__module__ = 'framework.encoders'
decode(val)

To be overloaded. (Should be stateless.)

Raise EncoderUnrecognizedValue if decoding is not possible.

Parameters

val (bytes) – the encoded value

Returns

the decoded value

Return type

bytes

encode(val)

To be overloaded. (Should be stateless.)

Parameters

val (bytes) – the value

Returns

the encoded value

Return type

bytes

init_encoding_scheme(arg=None)

To be optionally overloaded by a subclass that deals with encoding, if encoding needs to be initialized in some way. (called at init and in String.reset())

Parameters

arg – provided through the encoding_arg parameter of the String constructor

class framework.encoders.Wrap_Enc(encoding_arg=None)

Bases: framework.encoders.Encoder

Encoder to be used as a means to wrap a Node with a prefix and/or a suffix, without defining specific Nodes for that (meaning you don’t need to model that part and want to simplify your data description).

__annotations__ = {}
__module__ = 'framework.encoders'
decode(val)

To be overloaded. (Should be stateless.)

Raise EncoderUnrecognizedValue if decoding is not possible.

Parameters

val (bytes) – the encoded value

Returns

the decoded value

Return type

bytes

encode(val)

To be overloaded. (Should be stateless.)

Parameters

val (bytes) – the value

Returns

the encoded value

Return type

bytes

init_encoding_scheme(arg)

Take a list parameter specifying the prefix and the suffix to add to the value to encode, or to remove from an encoded value.

Parameters

arg (list) – Prefix and suffix character strings. Can be individually set to None

13.2.23. framework.database module

class framework.database.Database(fmkdb_path=None)

Bases: object

DDL_fname = 'fmk_db.sql'
DEFAULT_DB_NAME = 'fmkDB.db'
DEFAULT_DM_NAME = '__DEFAULT_DATAMODEL'
DEFAULT_GEN_NAME = '__DEFAULT_GNAME'
DEFAULT_GTYPE_NAME = '__DEFAULT_GTYPE'
FEEDBACK_TRAIL_TIME_WINDOW = 10
OUTCOME_DATA = 2
OUTCOME_ROWID = 1
__init__(fmkdb_path=None)
__module__ = 'framework.database'
_get_color_function(colorized)
_handle_binary_content(content, sz_limit=None, raw=False, colorized=True)
_is_valid(connection, cursor)
_sql_handler()
_stop_sql_handler()
check_data_existence(data_id, colorized=True)
column_names_from(table)
disable()
display_data_info(data_id, with_data=False, with_fbk=False, with_fmkinfo=True, with_analysis=True, with_async_data=False, fbk_src=None, limit_data_sz=None, page_width=100, colorized=True, raw=False, decoding_hints=None, dm_list=None)
display_data_info_by_date(start, end, with_data=False, with_fbk=False, with_fmkinfo=True, with_analysis=True, with_async_data=False, fbk_src=None, prj_name=None, limit_data_sz=None, raw=False, page_width=100, colorized=True, decoding_hints=None, dm_list=None)
display_data_info_by_range(first_id, last_id, with_data=False, with_fbk=False, with_fmkinfo=True, with_analysis=True, with_async_data=False, fbk_src=None, prj_name=None, limit_data_sz=None, raw=False, page_width=100, colorized=True, decoding_hints=None, dm_list=None)
display_stats(colorized=True)
enable()
execute_sql_statement(sql_stmt, params=None)
export_data(first, last=None, colorized=True)
fetch_data(start_id=1, end_id=-1)
flush_current_feedback()
flush_feedback()
get_data_with_impact(prj_name=None, fbk_src=None, fbk_status_formula='? < 0', display=True, verbose=False, raw_analysis=False, colorized=True)
get_data_with_specific_fbk(fbk, prj_name=None, fbk_src=None, display=True, colorized=True)
get_data_without_fbk(prj_name=None, fbk_src=None, display=True, colorized=True)
static get_default_db_path()
get_next_data_id(prev_id=None)
get_project_record(prj_name=None)
insert_analysis(data_id, content, date, impact=False)
insert_async_data(dtype, dm_name, raw_data, sz, sent_date, target_ref, prj_name, current_data_id=None)
insert_comment(data_id, content, date)
insert_data(dtype, dm_name, raw_data, sz, sent_date, ack_date, target_ref, prj_name, group_id=None)
insert_data_model(dm_name)
insert_dmaker(dm_name, dtype, name, is_gen, stateful, clone_type=None)
insert_feedback(data_id, source, timestamp, content, status_code=None)
insert_fmk_info(data_id, content, date, error=False)
insert_project(prj_name)
insert_steps(data_id, step_id, dmaker_type, dmaker_name, data_id_src, user_input, info)
is_enabled()
iter_feedback_entries(last=True, source=None)
remove_data(data_id, colorized=True)
shrink_db()
start()
stop()
submit_sql_stmt(stmt, params=None, outcome_type: Optional[int] = None, error_msg='')

This method is the only one that should submit request to the threaded SQL handler. It is also synchronized to guarantee request order (especially needed when you wait for the outcomes of your submitted SQL statement).

Parameters
  • stmt (str) – SQL statement

  • params (tuple) – parameters

  • outcome_type (int) – type of the expected outcomes. If None, no outcomes are expected

  • error_msg (str) – specific error message to display in case of an error

Returns

None or the expected outcomes

class framework.database.FeedbackGate(database, only_last_entries=True)

Bases: object

__bool__()
__init__(database, only_last_entries=True)
Parameters

database (Database) – database to be associated with

__iter__()
__module__ = 'framework.database'
get_feedback_from(source)
iter_entries(source=None)

Iterate over feedback entries that are related to the last data which has been sent by the framework.

Parameters

source (FeedbackSource) – feedback source to consider

Returns

A generator that iterates over all the requested feedback entries and provides for each:

  • the triplet: (status, timestamp, content) if source is associated to a specific feedback source

  • the 4-uplet: (source, status, timestamp, content) if source is None

Return type

python generator

property size
sources_names()

Return a list of the feedback source names related to the last data which has been sent by the framework.

Returns

names of the feedback sources

Return type

list

framework.database.regexp(expr, item)
framework.database.regexp_bin(expr, item)

13.2.24. framework.scenario module

class framework.scenario.FinalStep(data_desc=None, final=False, fbk_timeout=None, fbk_mode=None, sending_delay=None, set_periodic=None, clear_periodic=None, step_desc=None, start_tasks=None, stop_tasks=None, do_before_data_processing=None, do_before_sending=None, valid=True, vtg_ids=None, refresh_atoms=True, private=None)

Bases: framework.scenario.Step

__init__(data_desc=None, final=False, fbk_timeout=None, fbk_mode=None, sending_delay=None, set_periodic=None, clear_periodic=None, step_desc=None, start_tasks=None, stop_tasks=None, do_before_data_processing=None, do_before_sending=None, valid=True, vtg_ids=None, refresh_atoms=True, private=None)

Step objects are the building blocks of Scenarios.

Parameters
  • data_desc

  • final

  • fbk_timeout

  • fbk_mode

  • set_periodic

  • clear_periodic

  • step_desc

  • do_before_data_processing

  • do_before_sending

  • valid

  • vtg_ids (list, int) – Virtual ID list of the targets to which the outcomes of this data process will be sent. If None, the outcomes will be sent to the first target that has been enabled. If data_desc is a list, this parameter should be a list where each item is the vtg_ids of the corresponding item in the data_desc list.

  • transition_on_dp_complete (bool) – this attribute is set to True by the framework.

  • refresh_atoms (bool) – if set to True atoms described by names in data_desc will be re-instanced each time the step is entered.

  • private – Provided for arbitrary usage while building a scenario. It can be leveraged for instance within scenario callbacks to identify specific steps and/or provide specific information to a step.

__module__ = 'framework.scenario'
class framework.scenario.NoDataStep(data_desc=None, final=False, fbk_timeout=None, fbk_mode=None, sending_delay=None, set_periodic=None, clear_periodic=None, step_desc=None, start_tasks=None, stop_tasks=None, do_before_data_processing=None, do_before_sending=None, valid=True, vtg_ids=None, refresh_atoms=True, private=None)

Bases: framework.scenario.Step

__annotations__ = {}
__init__(data_desc=None, final=False, fbk_timeout=None, fbk_mode=None, sending_delay=None, set_periodic=None, clear_periodic=None, step_desc=None, start_tasks=None, stop_tasks=None, do_before_data_processing=None, do_before_sending=None, valid=True, vtg_ids=None, refresh_atoms=True, private=None)

Step objects are the building blocks of Scenarios.

Parameters
  • data_desc

  • final

  • fbk_timeout

  • fbk_mode

  • set_periodic

  • clear_periodic

  • step_desc

  • do_before_data_processing

  • do_before_sending

  • valid

  • vtg_ids (list, int) – Virtual ID list of the targets to which the outcomes of this data process will be sent. If None, the outcomes will be sent to the first target that has been enabled. If data_desc is a list, this parameter should be a list where each item is the vtg_ids of the corresponding item in the data_desc list.

  • transition_on_dp_complete (bool) – this attribute is set to True by the framework.

  • refresh_atoms (bool) – if set to True atoms described by names in data_desc will be re-instanced each time the step is entered.

  • private – Provided for arbitrary usage while building a scenario. It can be leveraged for instance within scenario callbacks to identify specific steps and/or provide specific information to a step.

__module__ = 'framework.scenario'
make_free()
class framework.scenario.Periodic(data, period=None, vtg_ids=None)

Bases: object

__init__(data, period=None, vtg_ids=None)
__module__ = 'framework.scenario'
__str__()

Return str(self).

class framework.scenario.Scenario(name, anchor=None, reinit_anchor=None, user_context=None, user_args=None)

Bases: object

__copy__()
__init__(name, anchor=None, reinit_anchor=None, user_context=None, user_args=None)

Note: only at copy the ScenarioEnv are propagated to the steps and transitions

Parameters
  • name

  • anchor

  • reinit_anchor

  • user_context

  • user_args

__module__ = 'framework.scenario'
__str__()

Return str(self).

_graph_setup(init_step, steps, transitions)
_init_main_properties()
_init_reinit_seq_properties()
_view_linux(filepath, graph_filename)

Open filepath in the user’s preferred application (linux).

_view_windows(filepath, graph_filename)

Start filepath with its associated application (windows).

property anchor
branch_to_reinit(step, prepend=True)
clone(new_name)
property current_step
property env
graph(fmt='pdf', select_current=False, display_ucontext=True)
merge_user_context_with(user_context)
property periodic_to_clear
property reinit_steps
property reinit_transitions
reset()
set_anchor(anchor, current=None)
set_data_model(dm)
set_reinit_anchor(reinit_anchor)
set_scenario_env(env: framework.scenario.ScenarioEnv, merge_user_contexts: bool = True)
Parameters
  • env

  • merge_user_contexts – the new env will have a user_context that is the merging of the current one and the one provided through the new env. In case some parameter names overlap, the new values are kept.

set_target(target)
property steps
property tasks_to_stop
property transitions
property user_context
walk_to(step)
walk_to_reinit()
class framework.scenario.ScenarioEnv

Bases: object

__copy__()
__init__()
__module__ = 'framework.scenario'
property dm
knowledge_source = None
property scenario
property target
property user_context
class framework.scenario.Step(data_desc=None, final=False, fbk_timeout=None, fbk_mode=None, sending_delay=None, set_periodic=None, clear_periodic=None, step_desc=None, start_tasks=None, stop_tasks=None, do_before_data_processing=None, do_before_sending=None, valid=True, vtg_ids=None, refresh_atoms=True, private=None)

Bases: object

__annotations__ = {}
__copy__()
__hash__()

Return hash(self).

__init__(data_desc=None, final=False, fbk_timeout=None, fbk_mode=None, sending_delay=None, set_periodic=None, clear_periodic=None, step_desc=None, start_tasks=None, stop_tasks=None, do_before_data_processing=None, do_before_sending=None, valid=True, vtg_ids=None, refresh_atoms=True, private=None)

Step objects are the building blocks of Scenarios.

Parameters
  • data_desc

  • final

  • fbk_timeout

  • fbk_mode

  • set_periodic

  • clear_periodic

  • step_desc

  • do_before_data_processing

  • do_before_sending

  • valid

  • vtg_ids (list, int) – Virtual ID list of the targets to which the outcomes of this data process will be sent. If None, the outcomes will be sent to the first target that has been enabled. If data_desc is a list, this parameter should be a list where each item is the vtg_ids of the corresponding item in the data_desc list.

  • transition_on_dp_complete (bool) – this attribute is set to True by the framework.

  • refresh_atoms (bool) – if set to True atoms described by names in data_desc will be re-instanced each time the step is entered.

  • private – Provided for arbitrary usage while building a scenario. It can be leveraged for instance within scenario callbacks to identify specific steps and/or provide specific information to a step.

__module__ = 'framework.scenario'
__str__()

Return str(self).

_handle_data_desc(data_desc)
_stutter_cbk(env, current_step, next_step)
cleanup()
clear_dmaker_reset()

Restore the state changed by .set_dmaker_reset()

connect_to(obj, dp_completed_guard=False, cbk_after_sending=None, cbk_after_fbk=None, prepend=False, description=None)
property content

Provide the atom of the step if possible. In the case of a DataProcess, if it has been carried out, then the resulting atom is returned, otherwise the seed atom is returned if it exists.

Provide an atom list if the step contains multiple atoms

property data_desc
do_before_data_processing()
do_before_sending()
property feedback_mode
property feedback_timeout
get_data()
get_desc(oneliner=True)
get_full_description(oneliner=True)
get_periodic_description()
get_periodic_ref()
get_tasks_description()
get_tasks_ref()
has_dataprocess()
has_tasks_to_start()
has_tasks_to_stop()
is_blocked()
is_periodic_cleared()
is_periodic_set()
make_blocked()
make_free()
make_stutter(count=None, rd_count_range: Optional[Tuple[int, int]] = None, fbk_timeout_range: Optional[Tuple[float, float]] = None)

Further to this call, a step is connected to itself with a guard enabling looping on the step for a number of times: either @count times or a random value within @rd_count_range.

Parameters
  • count – number of loops.

  • rd_count_range – number of loops is determined randomly within the bounds provided by this parameter.

  • fbk_timeout_range – feedback timeout is chosen randomly within the bounds provided by this parameter.

property periodic_to_clear
property periodic_to_set
property sending_delay
set_dmaker_reset()

Request the framework to reset the data makers involved in the step before processing them. Relevant only when DataProcess are in use.

set_scenario_env(env)
set_transitions(transitions)
property tasks_to_start
property tasks_to_stop
property transitions
class framework.scenario.StepStub(data_desc=None, final=False, fbk_timeout=None, fbk_mode=None, sending_delay=None, set_periodic=None, clear_periodic=None, step_desc=None, start_tasks=None, stop_tasks=None, do_before_data_processing=None, do_before_sending=None, valid=True, vtg_ids=None, refresh_atoms=True, private=None)

Bases: framework.scenario.Step

__annotations__ = {}
__init__(data_desc=None, final=False, fbk_timeout=None, fbk_mode=None, sending_delay=None, set_periodic=None, clear_periodic=None, step_desc=None, start_tasks=None, stop_tasks=None, do_before_data_processing=None, do_before_sending=None, valid=True, vtg_ids=None, refresh_atoms=True, private=None)

Step objects are the building blocks of Scenarios.

Parameters
  • data_desc

  • final

  • fbk_timeout

  • fbk_mode

  • set_periodic

  • clear_periodic

  • step_desc

  • do_before_data_processing

  • do_before_sending

  • valid

  • vtg_ids (list, int) – Virtual ID list of the targets to which the outcomes of this data process will be sent. If None, the outcomes will be sent to the first target that has been enabled. If data_desc is a list, this parameter should be a list where each item is the vtg_ids of the corresponding item in the data_desc list.

  • transition_on_dp_complete (bool) – this attribute is set to True by the framework.

  • refresh_atoms (bool) – if set to True atoms described by names in data_desc will be re-instanced each time the step is entered.

  • private – Provided for arbitrary usage while building a scenario. It can be leveraged for instance within scenario callbacks to identify specific steps and/or provide specific information to a step.

__module__ = 'framework.scenario'
class framework.scenario.Transition(obj, dp_completed_guard=False, cbk_after_sending=None, cbk_after_fbk=None, description=None)

Bases: object

__copy__()
__hash__()

Return hash(self).

__init__(obj, dp_completed_guard=False, cbk_after_sending=None, cbk_after_fbk=None, description=None)
__module__ = 'framework.scenario'
__str__()

Return str(self).

has_callback()
has_callback_pending()
invert_conditions()
is_crossable()
make_uncrossable()
register_callback(callback, hook=HOOK.after_fbk)
run_callback(current_step, feedback=None, hook=HOOK.after_fbk)
set_scenario_env(env, merge_user_contexts: bool = True)
property step

13.2.25. framework.dmhelpers.generic module

framework.dmhelpers.generic.COPY_VALUE(path, depth=None, vt=None, set_attrs=None, clear_attrs=None, after_encoding=True)

Return a generator that retrieves the value of another node, and then returns a vt node with this value. The other node is selected:

  • either directly by following the provided relative path from the given generator-parameter node.

  • or indirectly (if depth is provided) where a base node is first selected automatically, based on our current index within our own parent node (or the nth-ancestor, depending on the parameter depth), and then the targeted node is selected by following the provided relative path from the base node.

Parameters
  • path (str) – relative path to the node whose value will be picked.

  • depth (int) – depth of our nth-ancestor used as a reference to compute automatically the targeted base node position.

  • vt (type) – value type used for node generation (refer to framework.value_types).

  • set_attrs (list) – attributes that will be set on the generated node.

  • clear_attrs (list) – attributes that will be cleared on the generated node.

  • after_encoding (bool) – if False, copy the raw value, otherwise the encoded one. Can be set to False only if node arguments support encoding.

framework.dmhelpers.generic.CRC(vt=<class 'framework.value_types.INT_str'>, poly=4374732215, init_crc=0, xor_out=4294967295, rev=True, set_attrs=None, clear_attrs=None, after_encoding=True, freezable=False, base=16, letter_case='upper', min_sz=4, reverse_str=False)

Return a generator that returns the CRC (in the chosen type) of all the node parameters. (Default CRC is PKZIP CRC32)

Parameters
  • vt (type) – value type used for node generation (refer to framework.value_types)

  • poly (int) – CRC polynomial

  • init_crc (int) – initial value used to start the CRC calculation.

  • xor_out (int) – final value to XOR with the calculated CRC value.

  • rev (bool) – bit reversed algorithm when True.

  • set_attrs (list) – attributes that will be set on the generated node.

  • clear_attrs (list) – attributes that will be cleared on the generated node.

  • after_encoding (bool) – if False compute the CRC before any encoding. Can be set to False only if node arguments support encoding.

  • freezable (bool) – if False make the generator unfreezable in order to always provide the right value. (Note that tTYPE will still be able to corrupt the generator.)

  • base (int) – Relevant when vt is INT_str. Numerical base to use for string representation

  • letter_case (str) – Relevant when vt is INT_str. Letter case for string representation (‘upper’ or ‘lower’)

  • min_sz (int) – Relevant when vt is INT_str. Minimum size of the resulting string.

  • reverse_str (bool) – Reverse the order of the string if set to True.

framework.dmhelpers.generic.CYCLE(vals, depth=1, vt=<class 'framework.value_types.String'>, set_attrs=None, clear_attrs=None)

Return a generator that iterates over the provided value list and returns at each step a vt node corresponding to the current value.

Parameters
  • vals (list) – the value list to iterate on.

  • depth (int) – depth of our nth-ancestor used as a reference to iterate. By default, it is the parent node. Thus, in this case, depending on the drawn quantity of parent nodes, the position within the grand-parent determines the index of the value to use in the provided list, modulo the quantity.

  • vt (type) – value type used for node generation (refer to framework.value_types).

  • set_attrs (list) – attributes that will be set on the generated node.

  • clear_attrs (list) – attributes that will be cleared on the generated node.

framework.dmhelpers.generic.LEN(vt=<class 'framework.value_types.INT_str'>, base_len=0, set_attrs=None, clear_attrs=None, after_encoding=True, freezable=False)

Return a generator that returns the length of a node parameter.

Parameters
  • vt (type) – value type used for node generation (refer to framework.value_types).

  • base_len (int) – this base length will be added to the computed length.

  • set_attrs (list) – attributes that will be set on the generated node.

  • clear_attrs (list) – attributes that will be cleared on the generated node.

  • after_encoding (bool) – if False compute the length before any encoding. Can be set to False only if node arguments support encoding.

  • freezable (bool) – If False make the generator unfreezable in order to always provide the right value. (Note that tTYPE will still be able to corrupt the generator.)

class framework.dmhelpers.generic.MH

Bases: object

Define constants and generator templates for data model description.

class Attr

Bases: object

Abs_Postpone = 6
DEBUG = 40
Determinist = 3
Finite = 4
Freezable = 1
Highlight = 30
LOCKED = 50
Mutable = 2
Separator = 15
__module__ = 'framework.dmhelpers.generic'
class Charset

Bases: object

ASCII = 1
ASCII_EXT = 2
UNICODE = 3
__module__ = 'framework.dmhelpers.generic'
Copy = 'u'
class Custo

Bases: object

class Func

Bases: object

CloneExtNodeArgs = 2
FrozenArgs = 1
__module__ = 'framework.dmhelpers.generic'
class Gen

Bases: object

CloneExtNodeArgs = 2
ForwardConfChange = 1
ResetOnUnfreeze = 3
TriggerLast = 4
__module__ = 'framework.dmhelpers.generic'
class NTerm

Bases: object

CollapsePadding = 4
CycleClone = 2
DelayCollapsing = 5
FrozenCopy = 3
FullCombinatory = 6
MutableClone = 1
StickToDefault = 7
__module__ = 'framework.dmhelpers.generic'
__module__ = 'framework.dmhelpers.generic'
FullyRandom = '=.'
Generator = 2
Leaf = 3
NonTerminal = 1
Ordered = '>'
Pick = '=+'
Random = '=..'
RawNode = 4
Regex = 5
ZeroCopy = 's'
__module__ = 'framework.dmhelpers.generic'
static _handle_attrs(n, set_attrs, clear_attrs)
static _validate_int_vt(vt)
static _validate_vt(vt)
framework.dmhelpers.generic.OFFSET(use_current_position=True, depth=1, vt=<class 'framework.value_types.INT_str'>, set_attrs=None, clear_attrs=None, after_encoding=True, freezable=False)

Return a generator that computes the offset of a child node within its parent node.

If use_current_position is True, the child node is selected automatically, based on our current index within our own parent node (or the nth-ancestor, depending on the parameter depth). Otherwise, the child node has to be provided in the node parameters just before its parent node.

Besides, if there are N node parameters, the first N-1 (or N-2 if use_current_position is False) nodes are used for adding a fixed amount (the length of their concatenated values) to the offset (determined thanks to the node in the last position of the node parameters).

The generator returns the result wrapped in a vt node.

Parameters
  • use_current_position (bool) – automate the computation of the child node position

  • depth (int) – depth of our nth-ancestor used as a reference to compute automatically the targeted child node position. Only relevant if use_current_position is True.

  • vt (type) – value type used for node generation (refer to framework.value_types).

  • set_attrs (list) – attributes that will be set on the generated node.

  • clear_attrs (list) – attributes that will be cleared on the generated node.

  • after_encoding (bool) – if False compute the fixed amount part of the offset before any encoding. Can be set to False only if node arguments support encoding.

  • freezable (bool) – If False make the generator unfreezable in order to always provide the right value. (Note that tTYPE will still be able to corrupt the generator.)

framework.dmhelpers.generic.QTY(node_name, vt=<class 'framework.value_types.INT_str'>, set_attrs=None, clear_attrs=None, freezable=False)

Return a generator that returns the quantity of child node instances (referenced by name) of the node parameter provided to the generator.

Parameters
  • vt (type) – value type used for node generation (refer to framework.value_types)

  • node_name (str) – name of the child node whose instance amount will be returned by the generator

  • set_attrs (list) – attributes that will be set on the generated node.

  • clear_attrs (list) – attributes that will be cleared on the generated node.

  • freezable (bool) – If False make the generator unfreezable in order to always provide the right value. (Note that tTYPE will still be able to corrupt the generator.)

framework.dmhelpers.generic.SELECT(idx=None, path=None, filter_func=None, fallback_node=None, clone=True, set_attrs=None, clear_attrs=None)

Return a generator that selects a subnode from a non-terminal node and returns it (or a copy of it depending on the parameter clone). If the path parameter is provided, the previously selected node is searched for the path in order to return the related subnode instead. The non-terminal node is selected regarding various criteria provided as parameters.

Parameters
  • idx (int) – if None, the node will be selected randomly, otherwise it should be given the subnode's position in the non-terminal node, or in the subset of subnodes if the filter_func parameter is provided.

  • path (str) – if provided, it has to be a path identifying a subnode to clone from the selected node.

  • filter_func – function to filter the subnodes prior to any selection.

  • fallback_node (Node) – if ‘path’ does not exist, then the clone node will be the one provided in this parameter.

  • clone (bool) – [default: True] If True, the returned node will be cloned, otherwise the original node will be returned.

  • set_attrs (list) – attributes that will be set on the generated node (only if clone is True).

  • clear_attrs (list) – attributes that will be cleared on the generated node (only if clone is True).

framework.dmhelpers.generic.TIMESTAMP(time_format='%H%M%S', utc=False, set_attrs=None, clear_attrs=None)

Return a generator that returns the current time (in a String node).

Parameters
  • time_format (str) – time format to be used by the generator.

  • set_attrs (list) – attributes that will be set on the generated node.

  • clear_attrs (list) – attributes that will be cleared on the generated node.

framework.dmhelpers.generic.WRAP(func, vt=<class 'framework.value_types.String'>, set_attrs=None, clear_attrs=None, after_encoding=True, freezable=False)

Return a generator that returns the result (in the chosen type) of the provided function applied on the concatenation of all the node parameters.

Parameters
  • func (function) – function applied on the concatenation

  • vt (type) – value type used for node generation (refer to framework.value_types)

  • set_attrs (list) – attributes that will be set on the generated node.

  • clear_attrs (list) – attributes that will be cleared on the generated node.

  • after_encoding (bool) – if False, execute func on node arguments before any encoding. Can be set to False only if node arguments support encoding.

  • freezable (bool) – If False make the generator unfreezable in order to always provide the right value. (Note that tTYPE will still be able to corrupt the generator.)

13.2.26. framework.dmhelpers.xml module

class framework.dmhelpers.xml.TAG_TYPE(value)

Bases: enum.Enum

An enumeration.

__module__ = 'framework.dmhelpers.xml'
comment = 2
proc_instr = 3
standard = 1
framework.dmhelpers.xml.tag_builder(tag_name, params=None, refs=None, contents=None, node_name=None, codec='latin-1', tag_name_mutable=True, struct_mutable=True, determinist=True, condition=None, absorb_regexp=None, specific_fuzzy_vals=None, tag_type=TAG_TYPE.standard, nl_prefix=False, nl_suffix=False)

Helper for modeling an XML tag.

Parameters
  • tag_name (str) – name of the XML tag.

  • params (dict) – optional attributes to be added in the XML tag

  • refs (dict) – if provided it should contain for at least one parameter key (provided in params dict) the name to be used for the node representing the corresponding value. Useful when the parameter condition is in use and needs to relate to the value of specific parameters.

  • contents – can be either None (empty tag), a framework.data_model.Node, a dictionary (Node description), a string or a string list (string-Node values).

  • node_name (str) – name of the node to be created.

  • codec (str) – codec to be used for generating the XML tag.

  • tag_name_mutable (bool) – if False, the tag name will not be mutable, meaning that its Mutable attribute will be cleared.

  • struct_mutable (bool) – if False the XML structure “will not” be mutable, meaning that each node related to the structure will have its Mutable attribute cleared.

  • determinist (bool) – if False, the attribute order could change from one retrieved data to another.

  • condition (tuple) – optional existence condition for the tag. If not None a keyword exists_if will be added to the root node with this parameter as a value.

  • absorb_regexp (str) – regex for contents absorption

  • tag_type (TAG_TYPE) – specify the type of notation

  • specific_fuzzy_vals (dict) – if provided it should contain for at least one parameter key (provided in params dict) a list of specific values that will be used by some generic disruptors like tTYPE.

  • nl_prefix (bool) – add a new line character before the tag

  • nl_suffix (bool) – add a new line character after the tag

Returns

Node-description of the XML tag.

Return type

dict

framework.dmhelpers.xml.xml_decl_builder(determinist=True)

13.2.27. framework.evolutionary_helpers module

class framework.evolutionary_helpers.CrossoverHelper

Bases: object

class Operand(node)

Bases: object

__init__(node)
__module__ = 'framework.evolutionary_helpers'
_count_brothers(index, pattern)
_merge_brothers(index, pattern, length)
compute_sub_graphs(percentage)
__module__ = 'framework.evolutionary_helpers'
static _add_default_crossover_info(ind_1, ind_2, crossover_desc='')
classmethod _crossover_algo2(ind_1, ind_2, percentage_to_share)
static _get_nodes(node)
static _swap_nodes(node_1, node_2)
classmethod crossover_algo1(ind_1, ind_2)
classmethod get_configured_crossover_algo2(percentage_to_share=None)
Parameters

percentage_to_share – Percentage of the nodes to share.

Returns: func

class framework.evolutionary_helpers.DefaultIndividual(fmk, data, mutation_order=1)

Bases: framework.evolutionary_helpers.Individual

Provide a default implementation of the Individual class

__init__(fmk, data, mutation_order=1)
__module__ = 'framework.evolutionary_helpers'
mutate()
class framework.evolutionary_helpers.DefaultPopulation(fmk, *args, **kwargs)

Bases: framework.evolutionary_helpers.Population

Provide a default implementation of the Population base class

__module__ = 'framework.evolutionary_helpers'
__repr__()

Return repr(self).

_compute_probability_of_survival()

Normalize fitness scores between 0 and 1

_compute_scores()

Compute the score of each individual

_crossover()

Compensates the kills through the usage of the COMB disruptor

_initialize(init_process, max_size=100, max_generation_nb=50, crossover_algo=<bound method CrossoverHelper.crossover_algo1 of <class 'framework.evolutionary_helpers.CrossoverHelper'>>)

Configure the population

Parameters
  • init_process (string) – individuals that compose this population will be built using the provided framework.data.DataProcess

  • max_size (integer) – maximum size of the population to manipulate

  • max_generation_nb (integer) – criteria used to stop the evolution process

  • crossover_algo (func) – Crossover algorithm to use

_kill()

Simply rolls the dice

_mutate()

Operates three bit flips on each individual

evolve()

Describe the evolutionary process

is_final()

Check if the population can still evolve or not

reset()

Generate the first generation of individuals in a random way

class framework.evolutionary_helpers.EvolutionaryScenariosFactory

Bases: object

__module__ = 'framework.evolutionary_helpers'
static build(fmk, name, population_cls, args)

Create a scenario that takes advantage of an evolutionary approach.

Parameters
  • fmk (FmkPlumbing) – reference to FmkPlumbing

  • name (string) – name of the scenario to create

  • population_cls (classobj) – population class to instantiate

  • args (dict of str: object) – arguments that will be used to instantiate a population

Returns

evolutionary scenario

Return type

Scenario

class framework.evolutionary_helpers.Individual(fmk, data)

Bases: object

Represents a population member

__annotations__ = {}
__init__(fmk, data)
__module__ = 'framework.evolutionary_helpers'
mutate()
class framework.evolutionary_helpers.Population(fmk, *args, **kwargs)

Bases: object

Population to be used within an evolutionary scenario

__annotations__ = {}
__delitem__(key)
__getitem__(key)
__init__(fmk, *args, **kwargs)
__iter__()
__len__()
__module__ = 'framework.evolutionary_helpers'
__next__()
__repr__()

Return repr(self).

__setitem__(key, value)
_initialize(*args, **kwargs)

Initialize the population. Only called once during the creation of the Population instance.

evolve()

Describe the evolutionary process

is_final()

Check if the population can still evolve or not

next()
reset()

Reset the population. Called before each evolutionary process.

size()

13.2.28. framework.knowledge.feedback_collector module

class framework.knowledge.feedback_collector.FeedbackCollector

Bases: object

__init__()
__iter__()
__module__ = 'framework.knowledge.feedback_collector'
add_fbk_from(ref, fbk, status=0)
cleanup()
fbk_lock = <unlocked _thread.lock object>
get_bytes()
get_error_code()
get_timestamp()
has_fbk_collector()
iter_and_cleanup_collector()
set_bytes(bstring)
set_error_code(err_code)
class framework.knowledge.feedback_collector.FeedbackSource(src, subref=None, reliability=None, related_tg=None, display_feedback=True)

Bases: object

__eq__(other)

Return self==value.

__hash__()

Return hash(self).

__init__(src, subref=None, reliability=None, related_tg=None, display_feedback=True)
__module__ = 'framework.knowledge.feedback_collector'
__str__()

Return str(self).

property display_feedback
property obj
property related_tg

13.2.29. framework.knowledge.feedback_handler module

class framework.knowledge.feedback_handler.FeedbackHandler(new_window=False, new_window_title=None)

Bases: object

A feedback handler extracts information from binary data.

__init__(new_window=False, new_window_title=None)
Parameters

new_window – If True, a new terminal emulator is created, enabling the decoder to use it for display via the methods print() and print_nl()

__module__ = 'framework.knowledge.feedback_handler'
_start(current_dm)
_stop()
collect_data(s)
estimate_last_data_impact_uniqueness()

* To be overloaded *

Estimate the similarity of the consequences triggered by the current data sending from previous sending. Estimation can be computed with provided feedback.

Returns

provide an estimation of impact similarity

Return type

SimilarityMeasure

extract_info_from_feedback(current_dm, source, timestamp, content, status)

* To be overloaded *

Parameters
Returns

a set of information.Info or only one

Return type

Info

flush_collector()
notify_data_sending(current_dm, data_list, timestamp, target)

* To be overloaded *

This function is called when data have been sent. It enables processing feedback relative to previously sent data.

Parameters
print(msg)
print_nl(msg)
process_feedback(current_dm, source, timestamp, content, status)
start(current_dm)
stop()
class framework.knowledge.feedback_handler.SimilarityMeasure(level=0)

Bases: object

__add__(other)
__eq__(other)

Return self==value.

__ge__(other, NotImplemented=NotImplemented)

Return a >= b. Computed by @total_ordering from (not a < b).

__gt__(other, NotImplemented=NotImplemented)

Return a > b. Computed by @total_ordering from (not a < b) and (a != b).

__hash__ = None
__init__(level=0)
__le__(other, NotImplemented=NotImplemented)

Return a <= b. Computed by @total_ordering from (a < b) or (a == b).

__lt__(other)

Return self<value.

__module__ = 'framework.knowledge.feedback_handler'
property value
class framework.knowledge.feedback_handler.TestFbkHandler(new_window=False, new_window_title=None)

Bases: framework.knowledge.feedback_handler.FeedbackHandler

__annotations__ = {}
__module__ = 'framework.knowledge.feedback_handler'
extract_info_from_feedback(current_dm, source, timestamp, content, status)

* To be overloaded *

Parameters
Returns

a set of information.Info or only one

Return type

Info

13.2.30. framework.knowledge.information module

class framework.knowledge.information.Hardware(value)

Bases: framework.knowledge.information.Info

An enumeration.

ARM = 4
PowerPc = 3
Unknown = 5
X86_32 = 2
X86_64 = 1
__module__ = 'framework.knowledge.information'
class framework.knowledge.information.Info(value)

Bases: enum.Enum

An enumeration.

__init__(val)
__module__ = 'framework.knowledge.information'
decrease_trust(inc=1)
increase_trust(inc=1)
reset_trust()
property trust_level
property trust_value
class framework.knowledge.information.InformationCollector

Bases: object

__bool__()
__init__()
__module__ = 'framework.knowledge.information'
__str__()

Return str(self).

add_information(info, initial_trust_value=0)
is_assumption_valid(info)
is_info_class_represented(info_class)
reset_information()
class framework.knowledge.information.InputHandling(value)

Bases: framework.knowledge.information.Info

An enumeration.

Ctrl_Char_Set = 1
Printable_Char_Set = 2
Unknown = 3
__module__ = 'framework.knowledge.information'
class framework.knowledge.information.Language(value)

Bases: framework.knowledge.information.Info

An enumeration.

Ada = 2
C = 1
Pascal = 3
Unknown = 4
__module__ = 'framework.knowledge.information'
class framework.knowledge.information.OS(value)

Bases: framework.knowledge.information.Info

An enumeration.

Android = 3
Linux = 1
Unknown = 4
Windows = 2
__module__ = 'framework.knowledge.information'
class framework.knowledge.information.OperationMode(value)

Bases: framework.knowledge.information.Info

An enumeration.

Determinist = 1
Random = 2
__module__ = 'framework.knowledge.information'
class framework.knowledge.information.Test(value)

Bases: framework.knowledge.information.Info

An enumeration.

Cursory = 1
Deep = 3
Medium = 2
__module__ = 'framework.knowledge.information'
class framework.knowledge.information.TrustLevel(value)

Bases: enum.Enum

An enumeration.

Maximum = 1
Medium = 2
Minimum = 3
__module__ = 'framework.knowledge.information'

13.2.31. framework.constraint_helpers module

class framework.constraint_helpers.CSP(constraints: Optional[framework.constraint_helpers.Constraint] = None, highlight_variables=False)

Bases: object

__copy__()
__init__(constraints: Optional[framework.constraint_helpers.Constraint] = None, highlight_variables=False)
__module__ = 'framework.constraint_helpers'
_constraints = None
_exhausted_solutions = None
_is_solution_queried = False
_model = None
_orig_var_domain = None
_problem = None
_solutions = None
_solve_constraints()
_var_domain = None
_var_domain_updated = False
_var_node_mapping = None
_var_to_varns = None
_vars = None
property exhausted_solutions
from_var_to_varns(var)
get_all_constraints()
get_constraint(idx)
get_solution()
highlight_variables = None
property is_current_solution_queried
iter_vars()
map_var_to_node(var, node)
property nb_constraints
negate_constraint(idx)
next_solution()
reset()
reset_constraint(idx)
restore_var_domains()
save_current_var_domains()
set_var_domain(var, domain)
property var_domain_updated
property var_mapping
class framework.constraint_helpers.Constraint(relation, vars: Tuple, var_to_varns: Optional[dict] = None)

Bases: object

__copy__()
__init__(relation, vars: Tuple, var_to_varns: Optional[dict] = None)
Parameters
  • relation – boolean function that define the constraints between variables

  • vars (list) – list of the names of the nodes used in the boolean function in relation (in the same order as the parameters of the function).

  • var_to_varns (dict) – dictionary that associates for each name in vars, the comprehensive reference to the related node, which is a tuple of its name and its namespace.

__module__ = 'framework.constraint_helpers'
_negated_relation(*args)
_orig_relation = None
_var_domain = None
negate()
relation = None
reset_to_original()
property var_domain
vars = None
exception framework.constraint_helpers.ConstraintError

Bases: Exception

__module__ = 'framework.constraint_helpers'

13.2.32. framework.plumbing module

class framework.plumbing.ExportableFMKOps(fmk)
class framework.plumbing.FmkPlumbing(exit_on_error=False, debug_mode=False, quiet=False, external_term=False, fmkdb_path=None)

Defines the methods to operate every sub-systems of fuddly

_delay_sending()

return False if the user wants to stop fuzzing (action possible if delay is set to -1)

_log_directly_retrieved_target_feedback(tg, preamble=None, epilogue=None)

This method is to be used when the target does not make use of Logger.collect_feedback() facility. We thus try to access the feedback from Target directly

_send_data(data_list: Sequence[framework.data.Data])

@data_list: either a list of Data() or a Data()

cleanup_all_dmakers(reset_existing_seed=True)
cleanup_dmaker(dmaker_type=None, name=None, dmaker_obj=None, reset_existing_seed=True, error_on_init=True)
collect_residual_feedback(timeout=0)
disable_fmkdb()
disable_wkspace()
display_color_theme()
dynamic_generator_ids()
empty_data_bank()
empty_workspace()
enable_fmkdb()
enable_wkspace()
exec_dm_tests()
flush_errors()
fmkdb_fetch_data(start_id=1, end_id=- 1)
get_available_targets()
get_data_model_by_name(name)
get_data_models(fmkDB_update=True)
get_error()
get_from_data_bank(i)
get_last_data()
get_operator(name)
get_probe_delay(name)
get_project_by_name(name)
get_projects(fmkDB_update=True)
handle_data_desc(data_desc, resolve_dataprocess=True, original_data=None, save_generator_seed=False, reset_dmakers=False)
is_not_ok()
is_ok()
is_target_enabled()
is_usable()
iter_data_bank()
iter_data_models()
launch()
launch_operator(name, user_input=None, use_existing_seed=True, verbose=False)
launch_probe(name)
load_data_model(dm=None, name=None)
load_multiple_data_model(dm_list=None, name_list=None, reload_dm=False)
load_project(prj=None, name=None)
load_targets(tg_ids)
log_comment(comments)
log_target_residual_feedback()
monitor_probes(prefix=None, force_record=False)
property prj
process_data(action_list, seed=None, valid_gen=False, save_gen_seed=False, reset_dmakers=False)
Parameters

action_list (list) – Shall have a format compatible with what follows [(action_1, UserInput_1), …, (action_n, UserInput_n)] [action_1, (action_2, UserInput_2), … action_n] where action_N can be either: dmaker_type_N or (dmaker_type_N, dmaker_name_N)

process_data_and_send(data_desc=None, id_from_fmkdb=None, id_from_db=None, max_loop=1, tg_ids=None, verbose=False, console_display=True, save_generator_seed=False)

Send data to the selected targets. These data can follow a specific processing before being emitted. The latter depends on what is provided in data_desc.

Parameters
  • data_desc – Can be either a framework.data.DataProcess, a framework.data.Data, the name (str) of an atom of the loaded data models, or a list of the previous types.

  • id_from_fmkdb – Data can be fetched from the FmkDB and sent directly to the targets or be used as the seed of a DataProcess if such object is provided in data_desc.

  • id_from_db – Data can be fetched from the Data Bank and sent directly to the targets or be used as the seed of a DataProcess if such object is provided in data_desc.

  • max_loop – Maximum number of iterations. -1 means “infinite” or until some criteria occurs (e.g., a disruptor has exhausted, the end-user issued Ctrl-C, …)

  • tg_ids – Target ID or list of the Target IDs on which data will be sent. If provided it will supersede the tg_ids parameter of any DataProcess provided in data_desc

  • verbose – Pretty print sent data

  • console_display – If False, nothing will be displayed on the screen (that could cause latency)

  • save_generator_seed – If random Generators are used, the generated data will be internally saved and will be reused next time this generator will be called, until FmkPlumbing.cleanup_dmaker(… reset_existing_seed=True) is called on this Generator.

Returns

The list of data that have been sent. None if nothing was sent due to some error.

projects()
register_current_in_data_bank()
register_in_data_bank(data)
register_last_in_data_bank()
reload_all(tg_ids=None)
reload_dm()
retrieve_and_log_target_feedback(residual=False)
run_project(prj=None, name=None, tg_ids=None, dm_name=None)
send_data_and_log(data_list, verbose=False, console_display=True)
set_disruptor_weight(dmaker_type, data_maker_name, weight)
set_error(msg='', context=None, code=- 1)
set_feedback_mode(mode, tg_id=None, do_record=False, do_show=True)
set_feedback_timeout(timeout, tg_id=None, do_record=True, do_show=True)
set_generator_weight(generator_type, data_maker_name, weight)
set_health_check_timeout(timeout, target=None, do_record=True, do_show=True)
set_probe_delay(name, delay)
set_sending_burst_counter(val, do_record=False)
set_sending_delay(delay, do_record=True)
show_and_flush_errors()
show_atom_identifiers()
show_data(data: framework.data.Data, verbose=True)
show_data_bank()
show_data_maker_types()
show_data_models()
show_disruptors(dmaker_type=None)
show_fmk_internals()
show_generators(dmaker_type=None)
show_knowledge()
show_operators()
show_probes()
show_projects()
show_scenario(sc_name, fmt='pdf')
show_targets()
show_tasks()
show_wkspace()
start()
stop()
stop_all_probes()
stop_all_tasks()
stop_probe(name)
switch_feedback_mode(tg_id, do_record=False, do_show=True)
switch_term()
wait_for_target_readiness(forced_feedback_timeout=None)
Parameters

forced_feedback_timeout – should be an integer >= 0 if only feedback needs to be checked

Returns:

class framework.plumbing.FmkTask(name, func, arg, period=None, error_func=<function FmkTask.<lambda>>, cleanup_func=<function FmkTask.<lambda>>)
run()

Method representing the thread’s activity.

You may override this method in a subclass. The standard run() method invokes the callable object passed to the object’s constructor as the target argument, if any, with sequential and keyword arguments taken from the args and kwargs arguments, respectively.

stop()
class framework.plumbing.Printer(fmk)
flush()

Flush write buffers, if applicable.

This is not implemented for read-only and non-blocking streams.

print(msg)
start()
stop()
wait_for_sync()
write(data)

Write string to file.

Returns the number of characters written, which is always equal to the length of the string.

13.2.33. libs.utils module

class libs.utils.Accumulator

Bases: object

accumulate(msg)
clear()
class libs.utils.ExternalDisplay

Bases: object

property disp
property is_enabled
property is_terminal
start_term(title=None, keepterm=False)
stop()
class libs.utils.Task(period=None, init_delay=0, new_window=False, new_window_title=None)

Bases: object

cleanup()
dm = None
feedback_gate = None
fmkops = None
period = None
print(msg)
print_nl(msg)
prj = None
setup()
targets = None
class libs.utils.Term(title=None, keepterm=False)

Bases: object

print(s, newline=False)
print_nl(s)
start()
stop()
libs.utils.chunk_lines(string, length, prefix='')
libs.utils.find_file(filename, root_path)
libs.utils.get_caller_object(stack_frame=2)
libs.utils.retrieve_app_handler(filename)