split test + flow
parent 601ccdb278
commit a51967b238

plot.py
@@ -14,7 +14,7 @@ def plottable(log: list[tuple[float, int]]):
 
 def plot(fn: str):
     plt.rcParams['figure.figsize'] = [16, 9]
-    plt.style.use("dark_background")
+    plt.style.use('dark_background')
     plt.subplots_adjust(left=0.05, right=0.99, top=0.95, bottom=0.05)
     plt.title(fn)
     plt.xlabel('time (s)')
@@ -31,12 +31,12 @@ class ReductionChainProtocol(
         assert isinstance(reductor_factory, RainbowFactory)
         assert isinstance(accumulator_factory, RainbowFactory)
 
-        reduction_factory: RainbowFactory[
+        reducible_factory: RainbowFactory[
             Reducible[ReductorType, AccumulatorType]
         ] = ReducibleFactory(
             reductor_factory, accumulator_factory
         )
-        assert isinstance(reduction_factory, RainbowFactory)
+        assert isinstance(reducible_factory, RainbowFactory)
         stage_protocol: ActiveStageProtocol[
             ReductorType,
             AccumulatorType,
@@ -47,13 +47,13 @@ class ReductionChainProtocol(
         super().__init__(
             ActiveStageStateProtocol(
                 stage_protocol,
-                reduction_factory,
+                reducible_factory,
                 accumulator_factory,
             ),
             reductor_factory,
             StateStageFactory(
                 stage_protocol,
-                reduction_factory,
+                reducible_factory,
                 accumulator_factory
             )
         )
@@ -95,6 +95,10 @@ class Stack(RecursiveMentionable, Generic[ElementType]):
             )
         )
 
+    @classmethod
+    def unit(cls, element: HashPoint[ElementType]) -> 'Stack[ElementType]':
+        return Stack(NullableReference(Null(), StackFactory(element.factory)), element)
+
 
 class StackFactory(RainbowFactory[Stack[ElementType]], Generic[ElementType]):
     def __init__(self, factory: RainbowFactory[ElementType]):
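Editor's note (not part of the commit): the new Stack.unit classmethod builds a one-element stack from a single HashPoint. A minimal sketch, assuming the rainbowadn package at this commit is importable; Plain and HashPoint.of are used the same way as in the test files further down.

from rainbowadn.atomic import Plain
from rainbowadn.collection.linear import Stack
from rainbowadn.core import HashPoint

# a one-element stack whose only entry is the hash point of Plain(b'A')
one_element: Stack[Plain] = Stack.unit(HashPoint.of(Plain(b'A')))
assert isinstance(one_element, Stack)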
@@ -1,6 +1,6 @@
 from typing import Iterable, Optional, TypeVar
 
-__all__ = ('assert_true', 'assert_trues', 'assert_false', 'assert_none', 'assert_eq',)
+__all__ = ('assert_true', 'assert_trues', 'assert_false', 'assert_none', 'assert_none_strict', 'assert_eq',)
 
 
 def assert_true(value: bool) -> bool:
@@ -27,6 +27,11 @@ def assert_none(value: Optional[T]) -> bool:
     return True
 
 
+def assert_none_strict(value: None) -> bool:
+    assert value is None
+    return True
+
+
 def assert_eq(value: T, other: T) -> bool:
     assert value == other
     return True
rainbowadn/flow/__init__.py (new, empty file)

rainbowadn/flow/bridge/__init__.py (new file)
@@ -0,0 +1,5 @@
"""a bridge between old .chain and new .flow
todo: deprecate
"""
from ._stackbridge import *
from ._stagebridge import *
rainbowadn/flow/bridge/_stackbridge.py (new file)
@@ -0,0 +1,35 @@
from typing import Generic, TypeVar

from rainbowadn.collection.linear import *
from rainbowadn.core import *
from rainbowadn.flow.core import *
from rainbowadn.flow.sequence import *

__all__ = ('StackBridge',)

ElementT = TypeVar('ElementT')
Out = TypeVar('Out')


class StackBridge(
    Reducer[SequenceDispatcher[HashPoint[ElementT], Out], Out],
    Generic[Out, ElementT]
):
    def __init__(self, target: HashPoint[Stack[ElementT]]):
        assert isinstance(target, HashPoint)
        self.target = target

    async def reduce(self, reduce: Reduce[SequenceDispatcher[HashPoint[ElementT], Out], Out]) -> Out:
        resolved: Stack[ElementT] = await self.target.resolve()
        out: Out = reduce.initial
        out = await reduce.reduce(out, LastDispatcher(resolved.element))
        deepest: HashPoint[ElementT] = resolved.element
        for element in [x async for x in Stack.iter(resolved.previous)]:
            assert isinstance(element, HashPoint)
            out = await reduce.reduce(out, PairDispatcher(element, deepest))
            deepest = element
        out = await reduce.reduce(out, FirstDispatcher(deepest))
        return out

    def loose(self) -> Reducer[SequenceDispatcher[HashPoint[ElementT], Out], Out]:
        return self
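Editor's note (not part of the commit): a sketch of the order in which StackBridge visits a stack, assuming the package at this commit is importable. It reuses Stack.off(...).hashpoint() exactly as test_bridge.py does further down; TraceDispatch and CollectReduce are hypothetical helpers invented here. The top element is reported via on_last, the bottom via on_first, and adjacent elements via on_pair.

import asyncio

from rainbowadn.atomic import Plain
from rainbowadn.collection.linear import Stack
from rainbowadn.core import HashPoint
from rainbowadn.flow.bridge import StackBridge
from rainbowadn.flow.core import MapReduce, Reduce
from rainbowadn.flow.sequence import DispatchMapper, SequenceDispatch


class TraceDispatch(SequenceDispatch[HashPoint[Plain], str]):
    """Hypothetical dispatch that only records which callback fired."""

    async def on_first(self, element: HashPoint[Plain]) -> str:
        return 'first'

    async def on_last(self, element: HashPoint[Plain]) -> str:
        return 'last'

    async def on_pair(self, previous: HashPoint[Plain], element: HashPoint[Plain]) -> str:
        return 'pair'


class CollectReduce(Reduce[str, list[str]]):
    """Hypothetical Reduce that collects labels into a list."""

    def __init__(self):
        super().__init__([])

    async def reduce(self, out: list[str], element: str) -> list[str]:
        return out + [element]


async def main() -> None:
    hp = Stack.off(Plain.factory(), [Plain(b'A'), Plain(b'B'), Plain(b'C')]).hashpoint()
    labels = await StackBridge(hp).reduce(MapReduce(DispatchMapper(TraceDispatch()), CollectReduce()))
    print(labels)  # expected: ['last', 'pair', 'pair', 'first']


if __name__ == '__main__':
    asyncio.run(main())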
rainbowadn/flow/bridge/_stagebridge.py (new file)
@@ -0,0 +1,71 @@
from typing import Generic, TypeVar

from rainbowadn.chain.stages import *
from rainbowadn.collection.linear import *
from rainbowadn.collection.pair import *
from rainbowadn.core import *
from rainbowadn.flow.core import *
from rainbowadn.flow.sequence import *
from rainbowadn.flow.verification.stages import *
from rainbowadn.nullability import *
from ._stackbridge import *

__all__ = ('StageBridgeVP', 'StageBridgeM',)

BaseT = TypeVar('BaseT')
StageT = TypeVar('StageT')
HeaderT = TypeVar('HeaderT')


class StageBridgeVP(
    StageVerificationProtocol[HashPoint[BaseT], HashPoint[StageT], HashPoint[HeaderT]],
    Generic[BaseT, StageT, HeaderT]
):
    def __init__(self, stage_protocol: StageProtocol[HeaderT, BaseT, StageT], base_factory: RainbowFactory[BaseT]):
        assert isinstance(stage_protocol, StageProtocol)
        assert isinstance(base_factory, RainbowFactory)
        self.stage_protocol = stage_protocol
        self.base_factory = base_factory

    async def initial(
        self, previous: Nullable[HashPoint[BaseT]], header: HashPoint[HeaderT], stage: HashPoint[StageT]
    ) -> bool:
        assert_true(
            await self.stage_protocol.verify_header(NullableReference(previous, self.base_factory), header, stage)
        )
        return True

    async def final(self, stage: HashPoint[StageT], base: HashPoint[BaseT]) -> bool:
        assert_true(
            await self.stage_protocol.verify_state(stage, base)
        )
        return True

    async def intermediate(self, previous: HashPoint[StageT], stage: HashPoint[StageT]) -> bool:
        assert_true(
            await self.stage_protocol.verify_stage(previous, stage)
        )
        return True


class StageBridgeM(
    Mapper[
        HashPoint[Pair[BaseT, Stack[StageT]]],
        tuple[HashPoint[BaseT], Reducer[SequenceDispatcher[HashPoint[StageT], bool], bool]]
    ],
    Generic[BaseT, StageT],
):
    async def map(
        self,
        element: HashPoint[Pair[BaseT, Stack[StageT]]],
    ) -> tuple[HashPoint[BaseT], Reducer[SequenceDispatcher[HashPoint[StageT], bool], bool]]:
        assert isinstance(element, HashPoint)
        pair: Pair[BaseT, Stack[StageT]] = await element.resolve()
        assert isinstance(pair, Pair)
        base: HashPoint[BaseT] = pair.element0
        assert isinstance(base, HashPoint)
        stages_stack: HashPoint[Stack[StageT]] = pair.element1
        assert isinstance(stages_stack, HashPoint)
        stack_bridge: Reducer[SequenceDispatcher[HashPoint[StageT], bool], bool] = StackBridge(stages_stack).loose()
        assert isinstance(stack_bridge, Reducer)
        return base, stack_bridge
rainbowadn/flow/core/__init__.py (new file)
@@ -0,0 +1,5 @@
from ._mapper import *
from ._mapreduce import *
from ._mapreducer import *
from ._reduce import *
from ._reducer import *
rainbowadn/flow/core/_mapper.py (new file)
@@ -0,0 +1,11 @@
from typing import Generic, TypeVar

__all__ = ('Mapper',)

Element = TypeVar('Element')
Mapped = TypeVar('Mapped')


class Mapper(Generic[Element, Mapped]):
    async def map(self, element: Element) -> Mapped:
        raise NotImplementedError
rainbowadn/flow/core/_mapreduce.py (new file)
@@ -0,0 +1,25 @@
from typing import Generic, TypeVar

from ._mapper import *
from ._reduce import *

__all__ = ('MapReduce',)

Element = TypeVar('Element')
Mapped = TypeVar('Mapped')
Out = TypeVar('Out')


class MapReduce(Reduce[Element, Out], Generic[Element, Out, Mapped]):
    def __init__(self, mapper: Mapper[Element, Mapped], reduce: Reduce[Mapped, Out]):
        assert isinstance(mapper, Mapper)
        assert isinstance(reduce, Reduce)
        super().__init__(reduce.initial)
        self.mapper = mapper
        self.reduce_mapped = reduce

    async def reduce(self, out: Out, element: Element) -> Out:
        return await self.reduce_mapped.reduce(out, await self.mapper.map(element))

    def loose(self) -> Reduce[Element, Out]:
        return self
rainbowadn/flow/core/_mapreducer.py (new file)
@@ -0,0 +1,24 @@
from typing import Generic, TypeVar

from ._mapper import *
from ._mapreduce import *
from ._reduce import *
from ._reducer import *

__all__ = ('MapReducer',)

Element = TypeVar('Element')
Mapped = TypeVar('Mapped')
Out = TypeVar('Out')


class MapReducer(Reducer[Mapped, Out], Generic[Mapped, Out, Element]):
    def __init__(self, mapper: Mapper[Element, Mapped], reducer: Reducer[Element, Out]):
        assert isinstance(mapper, Mapper)
        assert isinstance(reducer, Reducer)
        self.mapper = mapper
        self.reducer = reducer

    async def reduce(self, reduce: Reduce[Mapped, Out]) -> Out:
        assert isinstance(reduce, Reduce)
        return self.reducer.reduce(MapReduce(self.mapper, reduce))
rainbowadn/flow/core/_reduce.py (new file)
@@ -0,0 +1,14 @@
from typing import Generic, TypeVar

__all__ = ('Reduce',)

Element = TypeVar('Element')
Out = TypeVar('Out')


class Reduce(Generic[Element, Out]):
    def __init__(self, initial: Out):
        self.initial = initial

    async def reduce(self, out: Out, element: Element) -> Out:
        raise NotImplementedError
rainbowadn/flow/core/_reducer.py (new file)
@@ -0,0 +1,13 @@
from typing import Generic, TypeVar

from ._reduce import *

__all__ = ('Reducer',)

Element = TypeVar('Element')
Out = TypeVar('Out')


class Reducer(Generic[Element, Out]):
    async def reduce(self, reduce: Reduce[Element, Out]) -> Out:
        raise NotImplementedError
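Editor's note (not part of the commit): a minimal sketch of how the new flow.core pieces compose, assuming the package at this commit is importable. Only Mapper, Reduce, MapReduce and Reducer come from this diff; ListReducer, DoubleMapper and SumReduce are hypothetical helpers invented here for illustration.

import asyncio

from rainbowadn.flow.core import MapReduce, Mapper, Reduce, Reducer


class ListReducer(Reducer[int, int]):
    """Hypothetical driver: feeds the elements of a plain list into a Reduce."""

    def __init__(self, elements: list[int]):
        self.elements = elements

    async def reduce(self, reduce: Reduce[int, int]) -> int:
        out = reduce.initial
        for element in self.elements:
            out = await reduce.reduce(out, element)
        return out


class DoubleMapper(Mapper[int, int]):
    async def map(self, element: int) -> int:
        return element * 2


class SumReduce(Reduce[int, int]):
    def __init__(self):
        super().__init__(0)

    async def reduce(self, out: int, element: int) -> int:
        return out + element


async def main() -> None:
    # MapReduce maps each element before folding it: sum(2 * x for x in [1, 2, 3]) == 12
    print(await ListReducer([1, 2, 3]).reduce(MapReduce(DoubleMapper(), SumReduce())))


if __name__ == '__main__':
    asyncio.run(main())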
rainbowadn/flow/sequence/__init__.py (new file)
@@ -0,0 +1,3 @@
from ._dispatchmapper import *
from ._sequencedispatch import *
from ._sequencedispatcher import *
rainbowadn/flow/sequence/_dispatchmapper.py (new file)
@@ -0,0 +1,26 @@
from typing import Generic, TypeVar

from rainbowadn.flow.core import *
from ._sequencedispatch import *
from ._sequencedispatcher import *

__all__ = ('DispatchMapper',)

Element = TypeVar('Element')
Out = TypeVar('Out')


class DispatchMapper(
    Mapper[SequenceDispatcher[Element, Out], Out],
    Generic[Out, Element]
):
    def __init__(self, dispatch: SequenceDispatch[Element, Out]):
        assert isinstance(dispatch, SequenceDispatch)
        self.dispatch = dispatch

    async def map(self, element: SequenceDispatcher[Element, Out]) -> Out:
        assert isinstance(element, SequenceDispatcher)
        return await element.dispatch(self.dispatch)

    def loose(self) -> Mapper[SequenceDispatcher[Element, Out], Out]:
        return self
rainbowadn/flow/sequence/_sequencedispatch.py (new file)
@@ -0,0 +1,17 @@
from typing import Generic, TypeVar

__all__ = ('SequenceDispatch',)

Element = TypeVar('Element')
Out = TypeVar('Out')


class SequenceDispatch(Generic[Element, Out]):
    async def on_first(self, element: Element) -> Out:
        raise NotImplementedError

    async def on_last(self, element: Element) -> Out:
        raise NotImplementedError

    async def on_pair(self, previous: Element, element: Element) -> Out:
        raise NotImplementedError
rainbowadn/flow/sequence/_sequencedispatcher.py (new file)
@@ -0,0 +1,41 @@
from typing import Generic, TypeVar

from ._sequencedispatch import *

__all__ = ('SequenceDispatcher', 'FirstDispatcher', 'LastDispatcher', 'PairDispatcher',)

Element = TypeVar('Element')
Out = TypeVar('Out')


class SequenceDispatcher(Generic[Element, Out]):
    async def dispatch(self, dispatch: SequenceDispatch[Element, Out]) -> Out:
        raise NotImplementedError


class FirstDispatcher(SequenceDispatcher[Element, Out]):
    def __init__(self, element: Element):
        self.element = element

    async def dispatch(self, dispatch: SequenceDispatch[Element, Out]) -> Out:
        assert isinstance(dispatch, SequenceDispatch)
        return await dispatch.on_first(self.element)


class LastDispatcher(SequenceDispatcher[Element, Out]):
    def __init__(self, element: Element):
        self.element = element

    async def dispatch(self, dispatch: SequenceDispatch[Element, Out]) -> Out:
        assert isinstance(dispatch, SequenceDispatch)
        return await dispatch.on_last(self.element)


class PairDispatcher(SequenceDispatcher[Element, Out]):
    def __init__(self, previous: Element, element: Element):
        self.previous = previous
        self.element = element

    async def dispatch(self, dispatch: SequenceDispatch[Element, Out]) -> Out:
        assert isinstance(dispatch, SequenceDispatch)
        return await dispatch.on_pair(self.previous, self.element)
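Editor's note (not part of the commit): FirstDispatcher, LastDispatcher and PairDispatcher double-dispatch into a SequenceDispatch, visitor-style. A small sketch assuming the package at this commit is importable; LabelDispatch is a hypothetical implementation.

import asyncio

from rainbowadn.flow.sequence import FirstDispatcher, LastDispatcher, PairDispatcher, SequenceDispatch


class LabelDispatch(SequenceDispatch[str, str]):
    async def on_first(self, element: str) -> str:
        return f'first:{element}'

    async def on_last(self, element: str) -> str:
        return f'last:{element}'

    async def on_pair(self, previous: str, element: str) -> str:
        return f'pair:{previous}->{element}'


async def main() -> None:
    dispatch = LabelDispatch()
    print(await FirstDispatcher('a').dispatch(dispatch))      # first:a
    print(await PairDispatcher('a', 'b').dispatch(dispatch))  # pair:a->b
    print(await LastDispatcher('b').dispatch(dispatch))       # last:b


if __name__ == '__main__':
    asyncio.run(main())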
rainbowadn/flow/stacked/__init__.py (new file)
@@ -0,0 +1,2 @@
from ._stackedreduce import *
from ._stackedreducer import *
rainbowadn/flow/stacked/_stackeddispatch.py (new file)
@@ -0,0 +1,26 @@
from typing import Generic, TypeVar

from rainbowadn.flow.core import *
from rainbowadn.flow.sequence import *
from rainbowadn.nullability import *

__all__ = ('StackedDispatch',)

Element = TypeVar('Element')
Out = TypeVar('Out')


class StackedDispatch(SequenceDispatch[Element, Out], Generic[Element, Out]):
    def __init__(self, stacked: Reduce[tuple[Nullable[Element], Element], Out], out: Out):
        assert isinstance(stacked, Reduce)
        self.stacked = stacked
        self.out = out

    async def on_first(self, element: Element) -> Out:
        return await self.stacked.reduce(self.out, (Null(), element))

    async def on_last(self, element: Element) -> Out:
        return self.out

    async def on_pair(self, previous: Element, element: Element) -> Out:
        return await self.stacked.reduce(self.out, (NotNull(previous), element))
rainbowadn/flow/stacked/_stackedreduce.py (new file)
@@ -0,0 +1,28 @@
from typing import Generic, TypeVar

from rainbowadn.flow.core import *
from rainbowadn.flow.sequence import *
from rainbowadn.nullability import *
from ._stackeddispatch import *

__all__ = ('StackedReduce',)

Stacked = TypeVar('Stacked')
Out = TypeVar('Out')


class StackedReduce(
    Reduce[SequenceDispatcher[Stacked, Out], Out],
    Generic[Out, Stacked]
):
    def __init__(self, stacked: Reduce[tuple[Nullable[Stacked], Stacked], Out]):
        assert isinstance(stacked, Reduce)
        super().__init__(stacked.initial)
        self.stacked = stacked

    async def reduce(self, out: Out, element: SequenceDispatcher[Stacked, Out]) -> Out:
        assert isinstance(element, SequenceDispatcher)
        return await element.dispatch(StackedDispatch(self.stacked, out))

    def loose(self) -> Reduce[SequenceDispatcher[Stacked, Out], Out]:
        return self
rainbowadn/flow/stacked/_stackedreducer.py (new file)
@@ -0,0 +1,30 @@
from typing import Generic, TypeVar

from rainbowadn.flow.core import *
from rainbowadn.flow.sequence import *
from rainbowadn.nullability import *
from ._stackedreduce import *

__all__ = ('StackedReducer',)

Stacked = TypeVar('Stacked')
Out = TypeVar('Out')


class StackedReducer(
    Reducer[tuple[Nullable[Stacked], Stacked], Out],
    Generic[Out, Stacked]
):
    def __init__(self, stacked: Reducer[SequenceDispatcher[Stacked, Out], Out]):
        assert isinstance(stacked, Reducer)
        self.stacked = stacked

    async def reduce(self, reduce: Reduce[tuple[Nullable[Stacked], Stacked], Out]) -> Out:
        assert isinstance(reduce, Reduce)
        stacked_reduce: StackedReduce[Out, Stacked] = StackedReduce(reduce)
        dispatcher_reduce: Reduce[SequenceDispatcher[Stacked, Out], Out] = stacked_reduce.loose()
        assert isinstance(dispatcher_reduce, Reduce)
        return await self.stacked.reduce(dispatcher_reduce)

    def loose(self) -> Reducer[tuple[Nullable[Stacked], Stacked], Out]:
        return self
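Editor's note (not part of the commit): StackedReducer adapts a Reducer over SequenceDispatcher items (such as StackBridge) into a Reducer over (Nullable[previous], element) pairs; the bottom element arrives with Null() as its predecessor and the top element contributes no pair of its own. A sketch assuming the package at this commit is importable; CollectPairs is a hypothetical Reduce, and the stack construction mirrors test_bridge.py below.

import asyncio

from rainbowadn.atomic import Plain
from rainbowadn.collection.linear import Stack
from rainbowadn.core import HashPoint
from rainbowadn.flow.bridge import StackBridge
from rainbowadn.flow.core import Reduce
from rainbowadn.flow.stacked import StackedReducer
from rainbowadn.nullability import Nullable


class CollectPairs(Reduce[tuple[Nullable[HashPoint[Plain]], HashPoint[Plain]], list]):
    """Hypothetical Reduce that records whether each element had a predecessor."""

    def __init__(self):
        super().__init__([])

    async def reduce(self, out: list, element: tuple[Nullable[HashPoint[Plain]], HashPoint[Plain]]) -> list:
        previous, current = element
        return out + [('-' if previous.null() else 'prev', current)]


async def main() -> None:
    hp = Stack.off(Plain.factory(), [Plain(b'A'), Plain(b'B'), Plain(b'C')]).hashpoint()
    pairs = await StackedReducer(StackBridge(hp).loose()).reduce(CollectPairs())
    # three entries for a three-element stack; the last one (the bottom of the stack) has no predecessor
    print(pairs)


if __name__ == '__main__':
    asyncio.run(main())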
rainbowadn/flow/verification/__init__.py (new, empty file)

rainbowadn/flow/verification/core/__init__.py (new file)
@@ -0,0 +1,4 @@
from ._mapperverification import *
from ._reduceverification import *
from ._verification import *
from ._verifyreduce import *
rainbowadn/flow/verification/core/_mapperverification.py (new file)
@@ -0,0 +1,17 @@
from typing import Generic, TypeVar

from rainbowadn.flow.core import *
from ._verification import *

__all__ = ('MapperVerification',)

Verified = TypeVar('Verified')


class MapperVerification(Verification[Verified], Generic[Verified]):
    def __init__(self, mapper: Mapper[Verified, bool]):
        assert isinstance(mapper, Mapper)
        self.mapper = mapper

    async def verify(self, element: Verified) -> bool:
        return await self.mapper.map(element)
rainbowadn/flow/verification/core/_reduceverification.py (new file)
@@ -0,0 +1,37 @@
from typing import Generic, TypeVar

from rainbowadn.core import *
from rainbowadn.flow.core import *
from ._verification import *
from ._verifyreduce import *

__all__ = ('ReduceVerification',)

Verified = TypeVar('Verified')


class ReduceVerification(
    Verification[Reducer[Verified, bool]],
    Generic[Verified],
):
    def __init__(
        self,
        verification: Verification[Verified]
    ):
        assert isinstance(verification, Mapper)
        self.verification = verification

    @classmethod
    def _verify_reduce(cls) -> Reduce[bool, bool]:
        return VerifyReduce()

    def _reduce(self) -> Reduce[Verified, bool]:
        return MapReduce(self.verification, self._verify_reduce())

    async def verify(self, element: Reducer[Verified, bool]) -> bool:
        assert isinstance(element, Reducer)
        assert_true(await element.reduce(self._reduce()))
        return True

    def loose(self) -> Verification[Reducer[Verified, bool]]:
        return self
rainbowadn/flow/verification/core/_verification.py (new file)
@@ -0,0 +1,23 @@
from typing import Generic, TypeVar

from rainbowadn.core import *
from rainbowadn.flow.core import *

__all__ = ('Verification',)

Verified = TypeVar('Verified')


class Verification(
    Mapper[Verified, bool],
    Generic[Verified]
):
    async def map(self, element: Verified) -> bool:
        assert_true(await self.verify(element))
        return True

    async def verify(self, element: Verified) -> bool:
        raise NotImplementedError

    def as_mapper(self) -> Mapper[Verified, bool]:
        return self
rainbowadn/flow/verification/core/_verifyreduce.py (new file)
@@ -0,0 +1,14 @@
from rainbowadn.core import *
from rainbowadn.flow.core import *

__all__ = ('VerifyReduce',)


class VerifyReduce(Reduce[bool, bool]):
    def __init__(self):
        super().__init__(True)

    async def reduce(self, out: bool, element: bool) -> bool:
        assert_true(out)
        assert_true(element)
        return True
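Editor's note (not part of the commit): ReduceVerification folds a per-element Verification over everything a Reducer produces, using VerifyReduce to keep the result True only if every check passes. A sketch assuming the package at this commit is importable; RangeReducer and Positive are hypothetical helpers, the rest comes from this diff.

import asyncio

from rainbowadn.flow.core import Reduce, Reducer
from rainbowadn.flow.verification.core import ReduceVerification, Verification


class RangeReducer(Reducer[int, bool]):
    """Hypothetical driver: feeds 1..n into a Reduce."""

    def __init__(self, n: int):
        self.n = n

    async def reduce(self, reduce: Reduce[int, bool]) -> bool:
        out = reduce.initial
        for element in range(1, self.n + 1):
            out = await reduce.reduce(out, element)
        return out


class Positive(Verification[int]):
    async def verify(self, element: int) -> bool:
        assert element > 0
        return True


async def main() -> None:
    # verifies every element the reducer produces; an AssertionError is raised on the first failure
    print(await ReduceVerification(Positive()).verify(RangeReducer(5)))  # True


if __name__ == '__main__':
    asyncio.run(main())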
rainbowadn/flow/verification/stages/__init__.py (new file)
@@ -0,0 +1,2 @@
from ._stageverification import *
from ._stageverificationprotocol import *
rainbowadn/flow/verification/stages/_stageverification.py (new file)
@@ -0,0 +1,71 @@
from typing import Generic, TypeVar

from rainbowadn.core import *
from rainbowadn.flow.core import *
from rainbowadn.flow.sequence import *
from rainbowadn.flow.verification.core import *
from rainbowadn.nullability import *
from ._stageverificationdispatch import *
from ._stageverificationprotocol import *

__all__ = ('StageVerification',)

Base = TypeVar('Base')
Stage = TypeVar('Stage')
Header = TypeVar('Header')
State = TypeVar('State')


class StageVerification(
    Verification[tuple[Nullable[State], Header, State]],
    Generic[Base, Stage, Header, State]
):
    def __init__(
        self,
        protocol: StageVerificationProtocol[Base, Stage, Header],
        mapper: Mapper[State, tuple[Base, Reducer[SequenceDispatcher[Stage, bool], bool]]],
    ):
        assert isinstance(protocol, StageVerificationProtocol)
        assert isinstance(mapper, Mapper)
        self.protocol = protocol
        self.mapper = mapper

    async def _base(self, state: Nullable[State]) -> Nullable[State]:
        assert isinstance(state, Nullable)
        if state.null():
            return Null()
        else:
            base: Base
            base, _ = await self.mapper.map(state.resolve())
            return NotNull(base)

    async def verify(self, element: tuple[Nullable[State], Header, State]) -> bool:
        assert isinstance(element, tuple)
        previous: Nullable[State]
        header: Header
        state: State
        previous, header, state = element
        assert isinstance(previous, Nullable)
        previous_base: Nullable[State]
        base: Base
        reducer: Reducer[SequenceDispatcher[Stage, bool], bool]
        previous_base, (base, reducer) = await gather(
            self._base(previous),
            self.mapper.map(state),
        )
        assert isinstance(previous_base, Nullable)
        assert isinstance(reducer, Reducer)
        verification: Verification[Reducer[SequenceDispatcher[Stage, bool], bool]] = StageVerificationDispatch(
            self.protocol,
            previous_base,
            header,
            base
        ).reduce_verification()
        assert isinstance(verification, Verification)
        assert_true(
            await verification.verify(reducer)
        )
        return True

    def loose(self) -> Verification[tuple[Nullable[State], Header, State]]:
        return self
rainbowadn/flow/verification/stages/_stageverificationdispatch.py (new file)
@@ -0,0 +1,51 @@
from typing import Generic, TypeVar

from rainbowadn.core import *
from rainbowadn.flow.core import *
from rainbowadn.flow.sequence import *
from rainbowadn.flow.verification.core import *
from rainbowadn.nullability import *
from ._stageverificationprotocol import *

__all__ = ('StageVerificationDispatch',)

Base = TypeVar('Base')
Stage = TypeVar('Stage')
Header = TypeVar('Header')


class StageVerificationDispatch(
    SequenceDispatch[Stage, bool],
    Generic[Base, Stage, Header]
):
    def __init__(
        self,
        protocol: StageVerificationProtocol[Base, Stage, Header],
        previous: Nullable[Base],
        header: Header,
        base: Base,
    ):
        assert isinstance(protocol, StageVerificationProtocol)
        assert isinstance(previous, Nullable)
        self.protocol = protocol
        self.previous = previous
        self.header = header
        self.base = base

    async def on_first(self, element: Stage) -> bool:
        assert_true(await self.protocol.initial(self.previous, self.header, element))
        return True

    async def on_last(self, element: Stage) -> bool:
        assert_true(await self.protocol.final(element, self.base))
        return True

    async def on_pair(self, previous: Stage, element: Stage) -> bool:
        assert_true(await self.protocol.intermediate(previous, element))
        return True

    def _verification(self) -> Verification[SequenceDispatcher[Stage, bool]]:
        return MapperVerification(DispatchMapper(self).loose())

    def reduce_verification(self) -> Verification[Reducer[SequenceDispatcher[Stage, bool], bool]]:
        return ReduceVerification(self._verification()).loose()
rainbowadn/flow/verification/stages/_stageverificationprotocol.py (new file)
@@ -0,0 +1,20 @@
from typing import Generic, TypeVar

from rainbowadn.nullability import *

__all__ = ('StageVerificationProtocol',)

Base = TypeVar('Base')
Stage = TypeVar('Stage')
Header = TypeVar('Header')


class StageVerificationProtocol(Generic[Base, Stage, Header]):
    async def initial(self, previous: Nullable[Base], header: Header, stage: Stage) -> bool:
        raise NotImplementedError

    async def final(self, stage: Stage, base: Base) -> bool:
        raise NotImplementedError

    async def intermediate(self, previous: Stage, stage: Stage) -> bool:
        raise NotImplementedError
rainbowadn/flow/verification/stateverification.py (new file)
@@ -0,0 +1,57 @@
from typing import Generic, TypeVar

from rainbowadn.core import *
from rainbowadn.flow.core import *
from rainbowadn.flow.verification.core import *
from rainbowadn.nullability import *

__all__ = ('StateVerification',)

Header = TypeVar('Header')
State = TypeVar('State')
Chain = TypeVar('Chain')


class StateVerification(
    Verification[tuple[Nullable[Chain], Chain]],
    Generic[Header, State, Chain]
):
    def __init__(
        self,
        mapper: Mapper[Chain, tuple[Header, State]],
        verification: Verification[tuple[Nullable[State], Header, State]]
    ):
        assert isinstance(mapper, Mapper)
        assert isinstance(verification, Mapper)
        self.mapper = mapper
        self.verification = verification

    async def _state(self, chain: Nullable[Chain]) -> Nullable[State]:
        assert isinstance(chain, Nullable)
        if chain.null():
            return Null()
        else:
            state: State
            _, state = await self.mapper.map(chain.resolve())
            return NotNull(state)

    async def _tuple(self, element: tuple[Nullable[Chain], Chain]) -> tuple[Nullable[State], Header, State]:
        assert isinstance(element, tuple)
        previous: Nullable[Chain]
        chain: Chain
        previous, chain = element
        assert isinstance(previous, Nullable)
        previous_state: Nullable[State]
        header: Header
        state: State
        previous_state, (header, state) = await gather(
            self._state(previous),
            self.mapper.map(chain),
        )
        assert isinstance(previous_state, Nullable)
        return previous_state, header, state

    async def verify(self, element: tuple[Nullable[Chain], Chain]) -> bool:
        assert isinstance(element, tuple)
        assert_true(await self.verification.map(await self._tuple(element)))
        return True
@@ -1,4 +1,5 @@
-from .cachingresolver import CachingResolver
-from .delayedresolver import DelayedResolver
-from .dictresolver import DictResolver
-from .failresolver import FailResolver
+from .cachingresolver import *
+from .defaultresolver import *
+from .delayedresolver import *
+from .dictresolver import *
+from .failresolver import *
rainbowadn/testing/resolvers/defaultresolver.py (new file)
@@ -0,0 +1,9 @@
from .dictresolver import *

__all__ = ('default_resolver',)


def default_resolver():
    dr = DictResolver()
    # dr = DelayedResolver(dr, lambda: 0.000)
    return dr
(removed file)
@@ -1,202 +0,0 @@
import os
import string
import time
import unittest
from typing import Any

import nacl.signing

from rainbowadn.atomic import *
from rainbowadn.chain import *
from rainbowadn.collection.comparison import *
from rainbowadn.collection.linear import *
from rainbowadn.collection.pair import *
from rainbowadn.collection.trees.binary import *
from rainbowadn.core import *
from rainbowadn.encryption import *
from rainbowadn.instrument import *
from rainbowadn.nullability import *
from rainbowadn.v13 import *
from rainbowadn.wrisbt import *
from .resolvers import *


class TestAll(unittest.IsolatedAsyncioTestCase):
    """examples rather than real tests"""

    @classmethod
    def dr(cls) -> ExtendableResolver:
        dr = DictResolver()
        # dr = DelayedResolver(dr, lambda: 0.000)
        return dr

    async def test_bankchain(self):
        set_gather_linear()
        with self.subTest('create empty'):
            bank: BankChain = BankChain.empty(ReductionChainMetaFactory().loose())
        with self.subTest('prepare transactions'):
            key_0 = nacl.signing.SigningKey.generate()
            transaction_0 = Transaction.make(
                [],
                [CoinData.of(Subject(key_0.verify_key), 1_000_000)],
                []
            )
            coin_0, coin_1 = await transaction_0.coins(MINT_CONST, NotNull(HashPoint.of(Subject(key_0.verify_key))))
        with self.subTest('add transactions'):
            bank = await bank.adds(
                [
                    transaction_0,
                    Transaction.make(
                        [coin_1],
                        [CoinData.of(Subject(nacl.signing.SigningKey.generate().verify_key), 10_000)],
                        [key_0]
                    ),
                ]
            )
        with self.subTest('add empty'):
            bank = await bank.adds(
                []
            )
        print(await bank.reference.str(0))
        with self.subTest('verify'):
            assert_true(await bank.verify())
        with self.subTest('recover'):
            bank = BankChain.from_reference(
                ReductionChainMetaFactory(), await self.dr().migrate_resolved(bank.reference)
            )
            set_gather_asyncio()
            print('recovering')
            print(await bank.reference.str(0))
            print('recovered')
        with self.subTest('verify'):
            assert_true(await bank.verify())

    async def test_wrisbt(self):
        set_gather_linear()
        with self.subTest('setup'):
            stoptime = time.process_time()

            def measure(message: str) -> float:
                nonlocal stoptime
                now = time.process_time()
                delta = now - stoptime
                print(message, delta)
                stoptime = now
                return delta

            n = 5000
            keysize = 7
        with self.subTest('create empty'):
            btree: WrisbtRoot = WrisbtRoot.empty(WrisbtParametres(5, keysize))
        measure('init')
        with self.subTest('add keys', n=n):
            for _ in range(n):
                key = os.urandom(keysize)
                assert_false(await btree.contains(key))
                btree = await btree.add(key)
                assert_true(await btree.contains(key))
        measure('add')
        with self.subTest('save'):
            btree = await self.dr().migrate_resolved(btree)
        measure('save')
        set_gather_asyncio()
        with self.subTest('resolve and iterate'):
            assert_eq(len(await btree.keys()), n)
            print(btree.height)
        measure('resolve and iterate')
        with self.subTest('resolve and add', n=n):
            for _ in range(n):
                key = os.urandom(keysize)
                assert_false(await btree.contains(key))
                btree = await btree.add(key)
                assert_true(await btree.contains(key))
            print(btree.height)
        measure('resolve and add')

    async def test_wrisbt_index(self):
        set_gather_linear()
        with self.subTest('create empty'):
            factory: RainbowFactory[Pair[Plain, Plain]] = PairFactory(Plain.factory(), Plain.factory()).loose()
            chain: ChainCollectionInterface[Any, Pair[Plain, Plain], WrisbtRoot] = BlockChainFactory(
                WrisbtChainProtocol(factory, 2).loose()
            ).empty().loose()
        with self.subTest('fill'):
            for _ in range(100):
                chain = await chain.add(
                    HashPoint.of(
                        Pair(
                            HashPoint.of(Plain(os.urandom(16))),
                            HashPoint.of(Plain(os.urandom(16)))
                        )
                    )
                )
        with self.subTest('check'):
            set_gather_asyncio()
            assert_true(await chain.verify())
        with self.subTest('measure height'):
            reference = await chain.actual_state()
            assert not reference.null()
            print((await reference.resolve()).height)

    async def test_avl(self):
        set_gather_linear()
        tree: ActiveBinaryTree[Plain, Integer] = ActiveBinaryTree.empty(
            AVL(PlainComparator(Replace())), Plain.factory()
        )
        for i in range(26):
            tree = await tree.add(HashPoint.of(Plain(bytes([ord('A') + i]))))
        print(await tree.reference.str(0))

    async def test_avl_stress(self):
        set_gather_linear()
        protocol = AVL(PlainComparator(Replace()))
        tree: ActiveBinaryTree[Plain, Integer] = ActiveBinaryTree.empty(
            protocol, Plain.factory()
        )
        for i in range(250):
            tree = await tree.add(HashPoint.of(Plain(os.urandom(16))))
        print(await AVL.height(tree.protocolized()))

    async def test_encryption(self):
        set_gather_linear()
        encrypt_ctr = Counter(Encrypted, 'encrypt')
        with self.subTest('setup'):
            key = b'a' * 32
        with self.subTest('create empty'):
            tree: ActiveBinaryTree[Plain, Integer] = ActiveBinaryTree.empty(
                AVL(PlainComparator(Replace())), Plain.factory()
            )
        with self.subTest('fill'):
            for char in string.ascii_uppercase:
                tree = await tree.add(HashPoint.of(Plain(char.encode())))
            print(await tree.reference.str(0))
        with self.subTest('encrypt'):
            target = tree.reference
            with encrypt_ctr:
                target = (await Encrypted.encrypt(target, key)).decrypted
            print(encrypt_ctr.counter)
            tree = tree.create(target)
            print(await tree.reference.str(0))
        with self.subTest('alter'):
            tree = await tree.add(HashPoint.of(Plain(b'NEWKEY')))
            tree = await tree.remove(HashPoint.of(Plain(b'F')))
            print(await tree.reference.str(0))
        with self.subTest('encrypt and migrate'):
            target = tree.reference
            with encrypt_ctr:
                eeed = await Encrypted.encrypt(target, key)
            print(encrypt_ctr.counter)
            print(await (await self.dr().migrate_resolved(eeed)).decrypted.str(0))
        with self.subTest('re-encrypt'):
            new_key = b'b' * 32
            target = eeed.decrypted
            with encrypt_ctr:
                await Encrypted.encrypt(target, new_key)
            print(encrypt_ctr.counter)

    async def test_tl(self):
        root = TLRootFactory(TLRParametres(2, Plain.factory())).empty()
        for char in string.ascii_uppercase:
            root = await root.add(HashPoint.of(Plain(char.encode())))
        print(await root.str(0))
        print((await root.node_resolved()).parametres.height)
rainbowadn/testing/test_bridge.py (new file)
@@ -0,0 +1,67 @@
import unittest

from rainbowadn.atomic import Plain
from rainbowadn.collection.linear import Stack
from rainbowadn.core import HashPoint, assert_none_strict, hash_point_format
from rainbowadn.flow.bridge import StackBridge
from rainbowadn.flow.core import MapReduce, Reduce
from rainbowadn.flow.sequence import DispatchMapper, SequenceDispatch
from rainbowadn.flow.stacked import StackedReducer
from rainbowadn.nullability import Nullable


class PrintDispatch(SequenceDispatch[HashPoint, None]):
    async def on_first(self, element: HashPoint) -> None:
        print('first', await hash_point_format(element, 0))

    async def on_last(self, element: HashPoint) -> None:
        print('last', await hash_point_format(element, 0))

    async def on_pair(self, previous: HashPoint, element: HashPoint) -> None:
        print('pair', await hash_point_format(previous, 0), await hash_point_format(element, 0))

    def loose(self) -> SequenceDispatch[HashPoint, None]:
        return self


class NoneReduce(Reduce[None, None]):
    def __init__(self):
        super().__init__(None)

    async def reduce(self, out: None, element: None) -> None:
        return None


class PrintReduce(Reduce[tuple[Nullable[HashPoint], HashPoint], None]):
    def __init__(self):
        super().__init__(None)

    async def reduce(self, out: None, element: tuple[Nullable[HashPoint], HashPoint]) -> None:
        nullable_, hashpoint_ = element
        print(
            'reduce',
            '-' if nullable_.null() else await hash_point_format(nullable_.resolve(), 0),
            await hash_point_format(hashpoint_, 0)
        )
        return None

    def loose(self) -> Reduce[tuple[Nullable[HashPoint], HashPoint], None]:
        return self


class TestBridge(unittest.IsolatedAsyncioTestCase):
    async def test_stack_bridge(self):
        hp: HashPoint[Stack[Plain]] = Stack.off(Plain.factory(), [Plain(b'A'), Plain(b'B'), Plain(b'C')]).hashpoint()
        bridge = StackBridge(hp)
        print(await hash_point_format(hp, 0))
        assert_none_strict(
            await bridge.reduce(MapReduce(DispatchMapper(PrintDispatch().loose()).loose(), NoneReduce()).loose())
        )

    async def test_stacked(self):
        hp: HashPoint[Stack[Plain]] = Stack.off(Plain.factory(), [Plain(b'A'), Plain(b'B'), Plain(b'C')]).hashpoint()
        bridge = StackBridge(hp)
        print(await hash_point_format(hp, 0))
        assert_none_strict(
            await StackedReducer(bridge.loose()).loose().reduce(PrintReduce().loose())
        )
rainbowadn/testing/test_encryption.py (new file)
@@ -0,0 +1,49 @@
import string
import unittest

from rainbowadn.atomic import Integer, Plain
from rainbowadn.collection.comparison import PlainComparator, Replace
from rainbowadn.collection.trees.binary import AVL, ActiveBinaryTree
from rainbowadn.core import HashPoint, set_gather_linear
from rainbowadn.encryption import Encrypted
from rainbowadn.instrument import Counter
from rainbowadn.testing.resolvers import default_resolver


class TestEncryption(unittest.IsolatedAsyncioTestCase):
    async def test_encryption(self):
        set_gather_linear()
        encrypt_ctr = Counter(Encrypted, 'encrypt')
        with self.subTest('setup'):
            key = b'a' * 32
        with self.subTest('create empty'):
            tree: ActiveBinaryTree[Plain, Integer] = ActiveBinaryTree.empty(
                AVL(PlainComparator(Replace())), Plain.factory()
            )
        with self.subTest('fill'):
            for char in string.ascii_uppercase:
                tree = await tree.add(HashPoint.of(Plain(char.encode())))
            print(await tree.reference.str(0))
        with self.subTest('encrypt'):
            target = tree.reference
            with encrypt_ctr:
                target = (await Encrypted.encrypt(target, key)).decrypted
            print(encrypt_ctr.counter)
            tree = tree.create(target)
            print(await tree.reference.str(0))
        with self.subTest('alter'):
            tree = await tree.add(HashPoint.of(Plain(b'NEWKEY')))
            tree = await tree.remove(HashPoint.of(Plain(b'F')))
            print(await tree.reference.str(0))
        with self.subTest('encrypt and migrate'):
            target = tree.reference
            with encrypt_ctr:
                eeed = await Encrypted.encrypt(target, key)
            print(encrypt_ctr.counter)
            print(await (await default_resolver().migrate_resolved(eeed)).decrypted.str(0))
        with self.subTest('re-encrypt'):
            new_key = b'b' * 32
            target = eeed.decrypted
            with encrypt_ctr:
                await Encrypted.encrypt(target, new_key)
            print(encrypt_ctr.counter)
rainbowadn/testing/test_tl.py (new file)
@@ -0,0 +1,15 @@
import string
import unittest

from rainbowadn.atomic import *
from rainbowadn.collection.linear import *
from rainbowadn.core import *


class TestTL(unittest.IsolatedAsyncioTestCase):
    async def test_tl(self):
        root = TLRootFactory(TLRParametres(2, Plain.factory())).empty()
        for char in string.ascii_uppercase:
            root = await root.add(HashPoint.of(Plain(char.encode())))
        print(await root.str(0))
        print((await root.node_resolved()).parametres.height)
rainbowadn/testing/test_trees.py (new file)
@@ -0,0 +1,104 @@
import os
import time
import unittest
from typing import Any

from rainbowadn.atomic import Integer, Plain
from rainbowadn.chain import BlockChainFactory, ChainCollectionInterface
from rainbowadn.collection.comparison import PlainComparator, Replace
from rainbowadn.collection.pair import Pair, PairFactory
from rainbowadn.collection.trees.binary import AVL, ActiveBinaryTree
from rainbowadn.core import (
    HashPoint, RainbowFactory, assert_eq, assert_false, assert_true, set_gather_asyncio,
    set_gather_linear,
)
from rainbowadn.testing.resolvers import default_resolver
from rainbowadn.wrisbt import WrisbtChainProtocol, WrisbtParametres, WrisbtRoot


class TestTrees(unittest.IsolatedAsyncioTestCase):
    async def test_wrisbt(self):
        set_gather_linear()
        with self.subTest('setup'):
            stoptime = time.process_time()

            def measure(message: str) -> float:
                nonlocal stoptime
                now = time.process_time()
                delta = now - stoptime
                print(message, delta)
                stoptime = now
                return delta

            n = 5000
            keysize = 7
        with self.subTest('create empty'):
            btree: WrisbtRoot = WrisbtRoot.empty(WrisbtParametres(5, keysize))
        measure('init')
        with self.subTest('add keys', n=n):
            for _ in range(n):
                key = os.urandom(keysize)
                assert_false(await btree.contains(key))
                btree = await btree.add(key)
                assert_true(await btree.contains(key))
        measure('add')
        with self.subTest('save'):
            btree = await default_resolver().migrate_resolved(btree)
        measure('save')
        set_gather_asyncio()
        with self.subTest('resolve and iterate'):
            assert_eq(len(await btree.keys()), n)
            print(btree.height)
        measure('resolve and iterate')
        with self.subTest('resolve and add', n=n):
            for _ in range(n):
                key = os.urandom(keysize)
                assert_false(await btree.contains(key))
                btree = await btree.add(key)
                assert_true(await btree.contains(key))
            print(btree.height)
        measure('resolve and add')

    async def test_wrisbt_index(self):
        set_gather_linear()
        with self.subTest('create empty'):
            factory: RainbowFactory[Pair[Plain, Plain]] = PairFactory(Plain.factory(), Plain.factory()).loose()
            chain: ChainCollectionInterface[Any, Pair[Plain, Plain], WrisbtRoot] = BlockChainFactory(
                WrisbtChainProtocol(factory, 2).loose()
            ).empty().loose()
        with self.subTest('fill'):
            for _ in range(100):
                chain = await chain.add(
                    HashPoint.of(
                        Pair(
                            HashPoint.of(Plain(os.urandom(16))),
                            HashPoint.of(Plain(os.urandom(16)))
                        )
                    )
                )
        with self.subTest('check'):
            set_gather_asyncio()
            assert_true(await chain.verify())
        with self.subTest('measure height'):
            reference = await chain.actual_state()
            assert not reference.null()
            print((await reference.resolve()).height)

    async def test_avl(self):
        set_gather_linear()
        tree: ActiveBinaryTree[Plain, Integer] = ActiveBinaryTree.empty(
            AVL(PlainComparator(Replace())), Plain.factory()
        )
        for i in range(26):
            tree = await tree.add(HashPoint.of(Plain(bytes([ord('A') + i]))))
        print(await tree.reference.str(0))

    async def test_avl_stress(self):
        set_gather_linear()
        protocol = AVL(PlainComparator(Replace()))
        tree: ActiveBinaryTree[Plain, Integer] = ActiveBinaryTree.empty(
            protocol, Plain.factory()
        )
        for i in range(250):
            tree = await tree.add(HashPoint.of(Plain(os.urandom(16))))
        print(await AVL.height(tree.protocolized()))
rainbowadn/testing/test_v13.py (new file)
@@ -0,0 +1,52 @@
import unittest

import nacl.signing

from rainbowadn.chain import ReductionChainMetaFactory
from rainbowadn.core import HashPoint, assert_true, set_gather_asyncio, set_gather_linear
from rainbowadn.nullability import NotNull
from rainbowadn.testing.resolvers import default_resolver
from rainbowadn.v13 import BankChain, CoinData, MINT_CONST, Subject, Transaction


class TestV13(unittest.IsolatedAsyncioTestCase):
    async def test_bankchain(self):
        set_gather_linear()
        with self.subTest('create empty'):
            bank: BankChain = BankChain.empty(ReductionChainMetaFactory().loose())
        with self.subTest('prepare transactions'):
            key_0 = nacl.signing.SigningKey.generate()
            transaction_0 = Transaction.make(
                [],
                [CoinData.of(Subject(key_0.verify_key), 1_000_000)],
                []
            )
            coin_0, coin_1 = await transaction_0.coins(MINT_CONST, NotNull(HashPoint.of(Subject(key_0.verify_key))))
        with self.subTest('add transactions'):
            bank = await bank.adds(
                [
                    transaction_0,
                    Transaction.make(
                        [coin_1],
                        [CoinData.of(Subject(nacl.signing.SigningKey.generate().verify_key), 10_000)],
                        [key_0]
                    ),
                ]
            )
        with self.subTest('add empty'):
            bank = await bank.adds(
                []
            )
        print(await bank.reference.str(0))
        with self.subTest('verify'):
            assert_true(await bank.verify())
        with self.subTest('recover'):
            bank = BankChain.from_reference(
                ReductionChainMetaFactory(), await default_resolver().migrate_resolved(bank.reference)
            )
            set_gather_asyncio()
            print('recovering')
            print(await bank.reference.str(0))
            print('recovered')
        with self.subTest('verify'):
            assert_true(await bank.verify())
trace.py
@@ -9,14 +9,14 @@ from contextlib import ExitStack
 from nacl.signing import SigningKey
 
 from plot import plot
-from rainbowadn.chain import *
-from rainbowadn.collection.linear import *
-from rainbowadn.collection.trees.binary import *
-from rainbowadn.core import *
-from rainbowadn.instrument import *
-from rainbowadn.nullability import *
-from rainbowadn.testing.resolvers import *
-from rainbowadn.v13 import *
+from rainbowadn.chain import ReductionChainMetaFactory
+from rainbowadn.collection.linear import Stack
+from rainbowadn.collection.trees.binary import ActiveBinaryTree
+from rainbowadn.core import ExtendableResolver, HashPoint, assert_true, set_gather_asyncio, set_gather_linear
+from rainbowadn.instrument import Concurrency, Counter, EntryExit, Instrumentation
+from rainbowadn.nullability import NotNull
+from rainbowadn.testing.resolvers import DelayedResolver, DictResolver
+from rainbowadn.v13 import BankChain, CoinData, MINT_CONST, Subject, Transaction
 
 
 def get_dr() -> ExtendableResolver: