flow13 + pure reduce
This commit is contained in:
parent
8bf8fcea32
commit
86f460989e
@ -25,6 +25,17 @@ class KeyValue(Keyed[KVKeyType], Generic[KVKeyType, KVValueType]):
|
||||
def __factory__(self) -> RainbowFactory['KeyValue[KVKeyType, KVValueType]']:
|
||||
return KeyValueFactory(self.key.factory, self.value.factory)
|
||||
|
||||
async def str(self, tab: int) -> str:
|
||||
assert isinstance(tab, int)
|
||||
key_str, value_str = await gather(
|
||||
hash_point_format(self.key, tab),
|
||||
hash_point_format(self.value, tab)
|
||||
)
|
||||
assert isinstance(key_str, str)
|
||||
assert isinstance(value_str, str)
|
||||
return f'{key_str}' \
|
||||
f'{tabulate(tab)}{value_str}'
|
||||
|
||||
|
||||
class KeyValueFactory(
|
||||
RainbowFactory[KeyValue[KVKeyType, KVValueType]],
|
||||
@ -43,3 +54,6 @@ class KeyValueFactory(
|
||||
ResolverOrigin(self.key_factory, source[:HashPoint.HASH_LENGTH], resolver).hash_point(),
|
||||
ResolverOrigin(self.value_factory, source[HashPoint.HASH_LENGTH:], resolver).hash_point(),
|
||||
)
|
||||
|
||||
def loose(self) -> RainbowFactory[KeyValue[KVKeyType, KVValueType]]:
|
||||
return self
|
||||
|
@ -73,15 +73,24 @@ class Stack(RecursiveMentionable, Generic[ElementType]):
|
||||
cls,
|
||||
reference: NullableReference['Stack[ElementType]']
|
||||
) -> AsyncIterable[HashPoint[ElementType]]:
|
||||
assert isinstance(reference, NullableReference)
|
||||
async for stack in cls.iter_stacks(reference):
|
||||
yield stack.element
|
||||
|
||||
@classmethod
|
||||
async def iter_stacks(
|
||||
cls,
|
||||
reference: NullableReference['Stack[ElementType]']
|
||||
) -> AsyncIterable['Stack[ElementType]']:
|
||||
assert isinstance(reference, NullableReference)
|
||||
if reference.null():
|
||||
pass
|
||||
else:
|
||||
stack: Stack[ElementType] = await reference.resolve()
|
||||
assert isinstance(stack, Stack)
|
||||
yield stack.element
|
||||
async for element in cls.iter(stack.previous):
|
||||
yield element
|
||||
yield stack
|
||||
async for stack in cls.iter_stacks(stack.previous):
|
||||
yield stack
|
||||
|
||||
@classmethod
|
||||
async def list(
|
||||
|
@ -1,9 +1,11 @@
|
||||
__all__ = (
|
||||
'BinaryAction',
|
||||
'CompareAction',
|
||||
'AddAction', 'RemoveAction', 'ContainsAction',
|
||||
'Symmetric', 'InnerOuter', 'OuterInner',
|
||||
)
|
||||
|
||||
from .binaryaction import BinaryAction
|
||||
from .compareaction import CompareAction
|
||||
from .stdactions import AddAction, ContainsAction, RemoveAction
|
||||
from .symmetric import InnerOuter, OuterInner, Symmetric
|
||||
|
@ -87,6 +87,12 @@ class ActiveBinaryTree(
|
||||
]:
|
||||
return self
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, ActiveBinaryTree):
|
||||
return self.reference == other.reference
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
|
||||
class ActiveCreation(
|
||||
BalancedCreation[
|
||||
|
@ -20,8 +20,8 @@ __all__ = (
|
||||
from .asserts import assert_eq, assert_false, assert_none, assert_none_strict, assert_true, assert_trues
|
||||
from .extendableresolver import ExtendableResolver
|
||||
from .gather import alist, asum, gather, set_gather_asyncio, set_gather_linear
|
||||
from .hash_point_format import hash_point_format, tabulate
|
||||
from .hashpoint import HashPoint
|
||||
from .hashpointformat import hash_point_format, tabulate
|
||||
from .hashresolver import HashResolver
|
||||
from .localmetaorigin import LocalMetaOrigin
|
||||
from .localorigin import LocalOrigin
|
||||
|
@ -3,9 +3,13 @@ todo: deprecate
|
||||
"""
|
||||
|
||||
__all__ = (
|
||||
'ListBridge',
|
||||
'StackBridge',
|
||||
'StageBridgeVP', 'StageBridgeM', 'stage_bridge',
|
||||
'StateBridgeM', 'StateBridgeV', 'state_bridge',
|
||||
)
|
||||
|
||||
from ._listbridge import ListBridge
|
||||
from ._stackbridge import StackBridge
|
||||
from ._stagebridge import StageBridgeM, StageBridgeVP, stage_bridge
|
||||
from ._statebridge import StateBridgeM, StateBridgeV, state_bridge
|
||||
|
38
rainbowadn/flow/bridge/_listbridge.py
Normal file
38
rainbowadn/flow/bridge/_listbridge.py
Normal file
@ -0,0 +1,38 @@
|
||||
from typing import Generic, TypeVar
|
||||
|
||||
from rainbowadn.core import *
|
||||
from rainbowadn.flow.core import *
|
||||
|
||||
__all__ = ('ListBridge',)
|
||||
|
||||
Element = TypeVar('Element')
|
||||
Out = TypeVar('Out')
|
||||
|
||||
|
||||
class ListBridge(
|
||||
Reducer[Element, Out],
|
||||
Generic[Element, Out]
|
||||
):
|
||||
def __init__(self, target: list[Element]):
|
||||
assert isinstance(target, list)
|
||||
self.target = target
|
||||
|
||||
async def reduce(self, reduce: Reduce[Element, Out]) -> Out:
|
||||
assert isinstance(reduce, Reduce)
|
||||
if not self.target:
|
||||
return reduce.initial
|
||||
elif len(self.target) == 1:
|
||||
return await reduce.reduce(reduce.initial, self.target[0])
|
||||
else:
|
||||
middle = len(self.target) // 2
|
||||
left_list: list[Element] = self.target[:middle]
|
||||
right_list: list[Element] = self.target[middle:]
|
||||
left_bridge: ListBridge[Element, Out] = ListBridge(left_list)
|
||||
right_bridge: ListBridge[Element, Out] = ListBridge(right_list)
|
||||
left: Out
|
||||
right: Out
|
||||
left, right = await gather(
|
||||
left_bridge.reduce(reduce),
|
||||
right_bridge.reduce(reduce),
|
||||
)
|
||||
return await reduce.merge(left, right)
|
@ -4,6 +4,7 @@ from rainbowadn.collection.linear import *
|
||||
from rainbowadn.core import *
|
||||
from rainbowadn.flow.core import *
|
||||
from rainbowadn.flow.sequence import *
|
||||
from ._listbridge import *
|
||||
|
||||
__all__ = ('StackBridge',)
|
||||
|
||||
@ -12,24 +13,48 @@ Out = TypeVar('Out')
|
||||
|
||||
|
||||
class StackBridge(
|
||||
Reducer[SequenceDispatcher[HashPoint[ElementT], Out], Out],
|
||||
Reducer[SequenceDispatcher[Stack[ElementT], Out], Out],
|
||||
Generic[Out, ElementT]
|
||||
):
|
||||
def __init__(self, target: HashPoint[Stack[ElementT]]):
|
||||
assert isinstance(target, HashPoint)
|
||||
self.target = target
|
||||
|
||||
async def reduce(self, reduce: Reduce[SequenceDispatcher[HashPoint[ElementT], Out], Out]) -> Out:
|
||||
async def reduce(self, reduce: Reduce[SequenceDispatcher[Stack[ElementT], Out], Out]) -> Out:
|
||||
resolved: Stack[ElementT] = await self.target.resolve()
|
||||
out: Out = reduce.initial
|
||||
out = await reduce.reduce(out, LastDispatcher(resolved.element))
|
||||
deepest: HashPoint[ElementT] = resolved.element
|
||||
for element in [x async for x in Stack.iter(resolved.previous)]:
|
||||
assert isinstance(element, HashPoint)
|
||||
out = await reduce.reduce(out, PairDispatcher(element, deepest))
|
||||
deepest = element
|
||||
out = await reduce.reduce(out, FirstDispatcher(deepest))
|
||||
return out
|
||||
dispatchers: list[SequenceDispatcher[Stack[ElementT], Out]] = [LastDispatcher(resolved)]
|
||||
deepest: Stack[ElementT] = resolved
|
||||
stack: Stack[ElementT]
|
||||
for stack in [x async for x in Stack.iter_stacks(resolved.previous)]:
|
||||
assert isinstance(stack, Stack)
|
||||
dispatchers.append(PairDispatcher(stack, deepest))
|
||||
deepest = stack
|
||||
dispatchers.append(FirstDispatcher(deepest))
|
||||
bridge: Reducer[SequenceDispatcher[Stack[ElementT], Out], Out] = ListBridge(dispatchers)
|
||||
assert isinstance(bridge, Reducer)
|
||||
return await bridge.reduce(reduce)
|
||||
|
||||
def loose(self) -> Reducer[SequenceDispatcher[HashPoint[ElementT], Out], Out]:
|
||||
def loose(self) -> Reducer[SequenceDispatcher[Stack[ElementT], Out], Out]:
|
||||
return self
|
||||
|
||||
@classmethod
|
||||
def mapper(cls) -> Mapper[
|
||||
HashPoint[Stack[ElementT]],
|
||||
Reducer[SequenceDispatcher[Stack[ElementT], Out], Out]
|
||||
]:
|
||||
return CallableMapper(cls)
|
||||
|
||||
@classmethod
|
||||
def element_of(cls, stack: Stack[ElementT]) -> HashPoint[ElementT]:
|
||||
return stack.element
|
||||
|
||||
@classmethod
|
||||
def element_mapper(cls) -> Mapper[Stack[ElementT], HashPoint[ElementT]]:
|
||||
return CallableMapper(cls.element_of)
|
||||
|
||||
def over_elements(self) -> Reducer[SequenceDispatcher[HashPoint[ElementT], Out], Out]:
|
||||
cdm: Mapper[
|
||||
SequenceDispatcher[Stack[ElementT], None],
|
||||
SequenceDispatcher[HashPoint[ElementT], None]
|
||||
] = CompositionDispatcher.mapper(self.element_mapper())
|
||||
return MapReducer(cdm, self)
|
||||
|
@ -67,7 +67,10 @@ class StageBridgeM(
|
||||
assert isinstance(base, HashPoint)
|
||||
stages_stack: HashPoint[Stack[StageT]] = pair.element1
|
||||
assert isinstance(stages_stack, HashPoint)
|
||||
stack_bridge: Reducer[SequenceDispatcher[HashPoint[StageT], bool], bool] = StackBridge(stages_stack).loose()
|
||||
stack_bridge: Reducer[
|
||||
SequenceDispatcher[HashPoint[StageT], bool],
|
||||
bool
|
||||
] = StackBridge(stages_stack).over_elements()
|
||||
assert isinstance(stack_bridge, Reducer)
|
||||
return base, stack_bridge
|
||||
|
||||
|
@ -8,7 +8,7 @@ from rainbowadn.flow.verification.core import *
|
||||
from rainbowadn.flow.verification.stateverification import *
|
||||
from rainbowadn.nullability import *
|
||||
|
||||
__all__ = ('StateBridgeM', 'StateBridgeV',)
|
||||
__all__ = ('StateBridgeM', 'StateBridgeV', 'state_bridge',)
|
||||
|
||||
HeaderT = TypeVar('HeaderT')
|
||||
StateT = TypeVar('StateT')
|
||||
|
@ -1,13 +1,19 @@
|
||||
__all__ = (
|
||||
'CallableMapper',
|
||||
'Composition',
|
||||
'Mapper',
|
||||
'MapReduce',
|
||||
'MapReducer',
|
||||
'PureReduce',
|
||||
'Reduce',
|
||||
'Reducer',
|
||||
)
|
||||
|
||||
from ._callablemapper import CallableMapper
|
||||
from ._composition import Composition
|
||||
from ._mapper import Mapper
|
||||
from ._mapreduce import MapReduce
|
||||
from ._mapreducer import MapReducer
|
||||
from ._purereduce import PureReduce
|
||||
from ._reduce import Reduce
|
||||
from ._reducer import Reducer
|
||||
|
20
rainbowadn/flow/core/_callablemapper.py
Normal file
20
rainbowadn/flow/core/_callablemapper.py
Normal file
@ -0,0 +1,20 @@
|
||||
from typing import Callable, Generic, TypeVar
|
||||
|
||||
from ._mapper import Mapper
|
||||
|
||||
__all__ = ('CallableMapper',)
|
||||
|
||||
Element = TypeVar('Element')
|
||||
Mapped = TypeVar('Mapped')
|
||||
|
||||
|
||||
class CallableMapper(
|
||||
Mapper[Element, Mapped],
|
||||
Generic[Element, Mapped]
|
||||
):
|
||||
def __init__(self, target: Callable[[Element], Mapped]):
|
||||
assert callable(target)
|
||||
self.target = target
|
||||
|
||||
async def map(self, element: Element) -> Mapped:
|
||||
return self.target(element)
|
23
rainbowadn/flow/core/_composition.py
Normal file
23
rainbowadn/flow/core/_composition.py
Normal file
@ -0,0 +1,23 @@
|
||||
from typing import Generic, TypeVar
|
||||
|
||||
from ._mapper import *
|
||||
|
||||
__all__ = ('Composition',)
|
||||
|
||||
Element = TypeVar('Element')
|
||||
Middle = TypeVar('Middle')
|
||||
Mapped = TypeVar('Mapped')
|
||||
|
||||
|
||||
class Composition(
|
||||
Mapper[Element, Mapped],
|
||||
Generic[Element, Mapped, Middle]
|
||||
):
|
||||
def __init__(self, domain: Mapper[Element, Middle], codomain: Mapper[Middle, Mapped]):
|
||||
assert isinstance(domain, Mapper)
|
||||
assert isinstance(codomain, Mapper)
|
||||
self.domain = domain
|
||||
self.codomain = codomain
|
||||
|
||||
async def map(self, element: Element) -> Mapped:
|
||||
return await self.codomain.map(await self.domain.map(element))
|
@ -21,5 +21,8 @@ class MapReduce(Reduce[Element, Out], Generic[Element, Out, Mapped]):
|
||||
async def reduce(self, out: Out, element: Element) -> Out:
|
||||
return await self.reduce_mapped.reduce(out, await self.mapper.map(element))
|
||||
|
||||
async def merge(self, left: Out, right: Out) -> Out:
|
||||
return await self.reduce_mapped.merge(left, right)
|
||||
|
||||
def loose(self) -> Reduce[Element, Out]:
|
||||
return self
|
||||
|
@ -21,4 +21,4 @@ class MapReducer(Reducer[Mapped, Out], Generic[Mapped, Out, Element]):
|
||||
|
||||
async def reduce(self, reduce: Reduce[Mapped, Out]) -> Out:
|
||||
assert isinstance(reduce, Reduce)
|
||||
return self.reducer.reduce(MapReduce(self.mapper, reduce))
|
||||
return await self.reducer.reduce(MapReduce(self.mapper, reduce))
|
||||
|
18
rainbowadn/flow/core/_purereduce.py
Normal file
18
rainbowadn/flow/core/_purereduce.py
Normal file
@ -0,0 +1,18 @@
|
||||
from typing import Generic, TypeVar
|
||||
|
||||
from ._reduce import *
|
||||
|
||||
__all__ = ('PureReduce',)
|
||||
|
||||
Pure = TypeVar('Pure')
|
||||
|
||||
|
||||
class PureReduce(Reduce[Pure, Pure], Generic[Pure]):
|
||||
def __init__(self, initial: Pure):
|
||||
super().__init__(initial)
|
||||
|
||||
async def reduce(self, out: Pure, element: Pure) -> Pure:
|
||||
return await self.merge(out, element)
|
||||
|
||||
async def merge(self, left: Pure, right: Pure) -> Pure:
|
||||
raise NotImplementedError
|
@ -12,3 +12,6 @@ class Reduce(Generic[Element, Out]):
|
||||
|
||||
async def reduce(self, out: Out, element: Element) -> Out:
|
||||
raise NotImplementedError
|
||||
|
||||
async def merge(self, left: Out, right: Out) -> Out:
|
||||
raise NotImplementedError
|
||||
|
5
rainbowadn/flow/primitive/__init__.py
Normal file
5
rainbowadn/flow/primitive/__init__.py
Normal file
@ -0,0 +1,5 @@
|
||||
__all__ = (
|
||||
'UnitReducer',
|
||||
)
|
||||
|
||||
from ._unitreducer import UnitReducer
|
16
rainbowadn/flow/primitive/_constmapper.py
Normal file
16
rainbowadn/flow/primitive/_constmapper.py
Normal file
@ -0,0 +1,16 @@
|
||||
from typing import Generic, TypeVar
|
||||
|
||||
from rainbowadn.flow.core import *
|
||||
|
||||
__all__ = ('ConstMapper',)
|
||||
|
||||
Element = TypeVar('Element')
|
||||
Mapped = TypeVar('Mapped')
|
||||
|
||||
|
||||
class ConstMapper(Mapper[Element, Mapped], Generic[Element, Mapped]):
|
||||
def __init__(self, value: Mapped):
|
||||
self.value = value
|
||||
|
||||
async def map(self, element: Element) -> Mapped:
|
||||
return self.value
|
20
rainbowadn/flow/primitive/_unitreducer.py
Normal file
20
rainbowadn/flow/primitive/_unitreducer.py
Normal file
@ -0,0 +1,20 @@
|
||||
from typing import Generic, TypeVar
|
||||
|
||||
from rainbowadn.flow.core import *
|
||||
|
||||
__all__ = ('UnitReducer',)
|
||||
|
||||
Element = TypeVar('Element')
|
||||
Out = TypeVar('Out')
|
||||
|
||||
|
||||
class UnitReducer(
|
||||
Reducer[Element, Out],
|
||||
Generic[Element, Out]
|
||||
):
|
||||
def __init__(self, element: Element):
|
||||
self.element = element
|
||||
|
||||
async def reduce(self, reduce: Reduce[Element, Out]) -> Out:
|
||||
assert isinstance(reduce, Reduce)
|
||||
return await reduce.reduce(reduce.initial, self.element)
|
@ -1,9 +1,11 @@
|
||||
__all__ = (
|
||||
'CompositionDispatcher',
|
||||
'DispatchMapper',
|
||||
'SequenceDispatch',
|
||||
'SequenceDispatcher', 'FirstDispatcher', 'LastDispatcher', 'PairDispatcher',
|
||||
)
|
||||
|
||||
from ._compositiondispatcher import CompositionDispatcher
|
||||
from ._dispatchmapper import DispatchMapper
|
||||
from ._sequencedispatch import SequenceDispatch
|
||||
from ._sequencedispatcher import FirstDispatcher, LastDispatcher, PairDispatcher, SequenceDispatcher
|
||||
|
42
rainbowadn/flow/sequence/_compositiondispatch.py
Normal file
42
rainbowadn/flow/sequence/_compositiondispatch.py
Normal file
@ -0,0 +1,42 @@
|
||||
from typing import Generic, TypeVar
|
||||
|
||||
from rainbowadn.core import *
|
||||
from rainbowadn.flow.core import *
|
||||
from ._sequencedispatch import *
|
||||
|
||||
__all__ = ('CompositionDispatch',)
|
||||
|
||||
Element = TypeVar('Element')
|
||||
Out = TypeVar('Out')
|
||||
Middle = TypeVar('Middle')
|
||||
|
||||
|
||||
class CompositionDispatch(
|
||||
SequenceDispatch[Element, Out],
|
||||
Generic[Element, Out, Middle],
|
||||
):
|
||||
def __init__(
|
||||
self,
|
||||
domain: Mapper[Element, Middle],
|
||||
codomain: SequenceDispatch[Middle, Out]
|
||||
):
|
||||
assert isinstance(domain, Mapper)
|
||||
assert isinstance(codomain, SequenceDispatch)
|
||||
self.domain = domain
|
||||
self.codomain = codomain
|
||||
|
||||
async def on_first(self, element: Element) -> Out:
|
||||
return await self.codomain.on_first(await self.domain.map(element))
|
||||
|
||||
async def on_last(self, element: Element) -> Out:
|
||||
return await self.codomain.on_last(await self.domain.map(element))
|
||||
|
||||
async def on_pair(self, previous: Element, element: Element) -> Out:
|
||||
previous_middle, element_middle = await gather(
|
||||
self.domain.map(previous),
|
||||
self.domain.map(element),
|
||||
)
|
||||
return await self.codomain.on_pair(
|
||||
previous_middle,
|
||||
element_middle
|
||||
)
|
50
rainbowadn/flow/sequence/_compositiondispatcher.py
Normal file
50
rainbowadn/flow/sequence/_compositiondispatcher.py
Normal file
@ -0,0 +1,50 @@
|
||||
from typing import Generic, TypeVar
|
||||
|
||||
from rainbowadn.flow.core import *
|
||||
from ._compositiondispatch import *
|
||||
from ._sequencedispatch import *
|
||||
from ._sequencedispatcher import *
|
||||
|
||||
__all__ = ('CompositionDispatcher',)
|
||||
|
||||
Element = TypeVar('Element')
|
||||
Out = TypeVar('Out')
|
||||
Middle = TypeVar('Middle')
|
||||
|
||||
|
||||
class CompositionDispatcher(
|
||||
SequenceDispatcher[Middle, Out],
|
||||
Generic[Middle, Out, Element]
|
||||
):
|
||||
def __init__(
|
||||
self,
|
||||
mapper: Mapper[Element, Middle],
|
||||
dispatcher: SequenceDispatcher[Element, Out]
|
||||
):
|
||||
assert isinstance(mapper, Mapper)
|
||||
assert isinstance(dispatcher, SequenceDispatcher)
|
||||
self.mapper = mapper
|
||||
self.dispatcher = dispatcher
|
||||
|
||||
async def dispatch(self, dispatch: SequenceDispatch[Middle, Out]) -> Out:
|
||||
cd: SequenceDispatch[Element, Out] = CompositionDispatch(self.mapper, dispatch)
|
||||
assert isinstance(cd, SequenceDispatch)
|
||||
return await self.dispatcher.dispatch(
|
||||
cd
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def mapper(cls, mapper: Mapper[Element, Middle]) -> Mapper[
|
||||
SequenceDispatcher[Element, Out],
|
||||
SequenceDispatcher[Middle, Out],
|
||||
]:
|
||||
assert isinstance(mapper, Mapper)
|
||||
|
||||
def wrap(dispatcher: SequenceDispatcher[Element, Out]) -> SequenceDispatcher[Middle, Out]:
|
||||
assert isinstance(dispatcher, SequenceDispatcher)
|
||||
return CompositionDispatcher(
|
||||
mapper,
|
||||
dispatcher
|
||||
)
|
||||
|
||||
return CallableMapper(wrap)
|
@ -24,5 +24,8 @@ class StackedReduce(
|
||||
assert isinstance(element, SequenceDispatcher)
|
||||
return await element.dispatch(StackedDispatch(self.stacked, out))
|
||||
|
||||
async def merge(self, left: Out, right: Out) -> Out:
|
||||
return await self.stacked.merge(left, right)
|
||||
|
||||
def loose(self) -> Reduce[SequenceDispatcher[Stacked, Out], Out]:
|
||||
return self
|
||||
|
@ -28,3 +28,10 @@ class StackedReducer(
|
||||
|
||||
def loose(self) -> Reducer[tuple[Nullable[Stacked], Stacked], Out]:
|
||||
return self
|
||||
|
||||
@classmethod
|
||||
def mapper(cls) -> Mapper[
|
||||
Reducer[SequenceDispatcher[Stacked, Out], Out],
|
||||
Reducer[tuple[Nullable[Stacked], Stacked], Out],
|
||||
]:
|
||||
return CallableMapper(cls)
|
||||
|
45
rainbowadn/flow/verification/chainverification.py
Normal file
45
rainbowadn/flow/verification/chainverification.py
Normal file
@ -0,0 +1,45 @@
|
||||
from typing import TypeVar
|
||||
|
||||
from rainbowadn.flow.core import *
|
||||
from rainbowadn.flow.sequence import *
|
||||
from rainbowadn.flow.stacked import *
|
||||
from rainbowadn.flow.verification.core import *
|
||||
from rainbowadn.nullability import *
|
||||
|
||||
__all__ = ('chain_verification',)
|
||||
|
||||
Link = TypeVar('Link')
|
||||
Chain = TypeVar('Chain')
|
||||
|
||||
|
||||
def chain_verification(
|
||||
chain_to_dispatched: Mapper[Chain, Reducer[SequenceDispatcher[Link, bool], bool]],
|
||||
link_verification: Verification[tuple[Nullable[Link], Link]],
|
||||
) -> Verification[
|
||||
Chain
|
||||
]:
|
||||
assert isinstance(chain_to_dispatched, Mapper)
|
||||
assert isinstance(link_verification, Verification)
|
||||
dispatched_to_stacked: Mapper[
|
||||
Reducer[SequenceDispatcher[Link, bool], bool],
|
||||
Reducer[tuple[Nullable[Link], Link], bool]
|
||||
] = StackedReducer.mapper()
|
||||
assert isinstance(dispatched_to_stacked, Mapper)
|
||||
chain_to_stacked: Mapper[
|
||||
Chain,
|
||||
Reducer[tuple[Nullable[Link], Link], bool]
|
||||
] = Composition(
|
||||
chain_to_dispatched,
|
||||
dispatched_to_stacked
|
||||
)
|
||||
assert isinstance(chain_to_stacked, Mapper)
|
||||
stacked_verification: Verification[
|
||||
Reducer[tuple[Nullable[Link], Link], bool]
|
||||
] = ReduceVerification(link_verification).loose()
|
||||
assert isinstance(stacked_verification, Verification)
|
||||
verification: Verification[Chain] = CompositionVerification(
|
||||
chain_to_stacked,
|
||||
stacked_verification
|
||||
)
|
||||
assert isinstance(verification, Verification)
|
||||
return verification
|
@ -1,10 +1,12 @@
|
||||
__all__ = (
|
||||
'CompositionVerification',
|
||||
'MapperVerification',
|
||||
'ReduceVerification',
|
||||
'Verification',
|
||||
'VerifyReduce',
|
||||
)
|
||||
|
||||
from ._compositionverification import CompositionVerification
|
||||
from ._mapperverification import MapperVerification
|
||||
from ._reduceverification import ReduceVerification
|
||||
from ._verification import Verification
|
||||
|
@ -0,0 +1,25 @@
|
||||
from typing import Generic, TypeVar
|
||||
|
||||
from rainbowadn.core import *
|
||||
from rainbowadn.flow.core import *
|
||||
from ._verification import *
|
||||
|
||||
__all__ = ('CompositionVerification',)
|
||||
|
||||
Verified = TypeVar('Verified')
|
||||
Middle = TypeVar('Middle')
|
||||
|
||||
|
||||
class CompositionVerification(
|
||||
Verification[Verified],
|
||||
Generic[Verified, Middle]
|
||||
):
|
||||
def __init__(self, domain: Mapper[Verified, Middle], codomain: Verification[Middle]):
|
||||
assert isinstance(domain, Mapper)
|
||||
assert isinstance(codomain, Verification)
|
||||
self.domain = domain
|
||||
self.codomain = codomain
|
||||
|
||||
async def verify(self, element: Verified) -> bool:
|
||||
assert_true(await self.codomain.verify(await self.domain.map(element)))
|
||||
return True
|
@ -4,11 +4,11 @@ from rainbowadn.flow.core import *
|
||||
__all__ = ('VerifyReduce',)
|
||||
|
||||
|
||||
class VerifyReduce(Reduce[bool, bool]):
|
||||
class VerifyReduce(PureReduce[bool]):
|
||||
def __init__(self):
|
||||
super().__init__(True)
|
||||
|
||||
async def reduce(self, out: bool, element: bool) -> bool:
|
||||
assert_true(out)
|
||||
assert_true(element)
|
||||
async def merge(self, left: bool, right: bool) -> bool:
|
||||
assert_true(left)
|
||||
assert_true(right)
|
||||
return True
|
||||
|
@ -9,16 +9,16 @@ __all__ = ('StateVerification',)
|
||||
|
||||
Header = TypeVar('Header')
|
||||
State = TypeVar('State')
|
||||
Chain = TypeVar('Chain')
|
||||
Link = TypeVar('Link')
|
||||
|
||||
|
||||
class StateVerification(
|
||||
Verification[tuple[Nullable[Chain], Chain]],
|
||||
Generic[Header, State, Chain]
|
||||
Verification[tuple[Nullable[Link], Link]],
|
||||
Generic[Header, State, Link]
|
||||
):
|
||||
def __init__(
|
||||
self,
|
||||
mapper: Mapper[Chain, tuple[Header, State]],
|
||||
mapper: Mapper[Link, tuple[Header, State]],
|
||||
verification: Verification[tuple[Nullable[State], Header, State]]
|
||||
):
|
||||
assert isinstance(mapper, Mapper)
|
||||
@ -26,7 +26,7 @@ class StateVerification(
|
||||
self.mapper = mapper
|
||||
self.verification = verification
|
||||
|
||||
async def _state(self, chain: Nullable[Chain]) -> Nullable[State]:
|
||||
async def _state(self, chain: Nullable[Link]) -> Nullable[State]:
|
||||
assert isinstance(chain, Nullable)
|
||||
if chain.null():
|
||||
return Null()
|
||||
@ -35,10 +35,10 @@ class StateVerification(
|
||||
_, state = await self.mapper.map(chain.resolve())
|
||||
return NotNull(state)
|
||||
|
||||
async def _tuple(self, element: tuple[Nullable[Chain], Chain]) -> tuple[Nullable[State], Header, State]:
|
||||
async def _tuple(self, element: tuple[Nullable[Link], Link]) -> tuple[Nullable[State], Header, State]:
|
||||
assert isinstance(element, tuple)
|
||||
previous: Nullable[Chain]
|
||||
chain: Chain
|
||||
previous: Nullable[Link]
|
||||
chain: Link
|
||||
previous, chain = element
|
||||
assert isinstance(previous, Nullable)
|
||||
previous_state: Nullable[State]
|
||||
@ -51,10 +51,10 @@ class StateVerification(
|
||||
assert isinstance(previous_state, Nullable)
|
||||
return previous_state, header, state
|
||||
|
||||
async def verify(self, element: tuple[Nullable[Chain], Chain]) -> bool:
|
||||
async def verify(self, element: tuple[Nullable[Link], Link]) -> bool:
|
||||
assert isinstance(element, tuple)
|
||||
assert_true(await self.verification.map(await self._tuple(element)))
|
||||
assert_true(await self.verification.verify(await self._tuple(element)))
|
||||
return True
|
||||
|
||||
def loose(self) -> Verification[tuple[Nullable[Chain], Chain]]:
|
||||
def loose(self) -> Verification[tuple[Nullable[Link], Link]]:
|
||||
return self
|
||||
|
3
rainbowadn/flow13/__init__.py
Normal file
3
rainbowadn/flow13/__init__.py
Normal file
@ -0,0 +1,3 @@
|
||||
__all__ = ('FlowStandard',)
|
||||
|
||||
from ._flowstandard import FlowStandard
|
194
rainbowadn/flow13/_binaryflow.py
Normal file
194
rainbowadn/flow13/_binaryflow.py
Normal file
@ -0,0 +1,194 @@
|
||||
from typing import Generic, TypeAlias, TypeVar
|
||||
|
||||
from rainbowadn.collection.comparison import *
|
||||
from rainbowadn.collection.trees.binary.actions import *
|
||||
from rainbowadn.collection.trees.binary.core import *
|
||||
from rainbowadn.collection.trees.binary.core import BinaryProtocolized
|
||||
from rainbowadn.core import *
|
||||
from rainbowadn.flow.core import *
|
||||
|
||||
__all__ = ('BinaryReducer', 'VerifySubsetAction', 'CheckResult',)
|
||||
|
||||
KeyT = TypeVar('KeyT')
|
||||
MetadataT = TypeVar('MetadataT')
|
||||
TreeT = TypeVar('TreeT')
|
||||
Out = TypeVar('Out')
|
||||
|
||||
BP: TypeAlias = BinaryProtocolized[KeyT, MetadataT, TreeT]
|
||||
PBS: TypeAlias = ProtocolizedBinarySplit[KeyT, MetadataT, TreeT]
|
||||
|
||||
|
||||
class BinaryReducer(
|
||||
Reducer[KeyT, Out],
|
||||
Generic[Out, KeyT, MetadataT, TreeT]
|
||||
):
|
||||
def __init__(self, protocolized: BP):
|
||||
assert isinstance(protocolized, BinaryProtocolized)
|
||||
self.protocolized = protocolized
|
||||
|
||||
async def reduce(self, reduce: Reduce[KeyT, Out]) -> Out:
|
||||
assert isinstance(reduce, Reduce)
|
||||
return await BinaryReducerAction(reduce).on(self.protocolized)
|
||||
|
||||
def loose(self) -> Reducer[KeyT, Out]:
|
||||
return self
|
||||
|
||||
|
||||
class BinaryReducerAction(
|
||||
BinaryAction[KeyT, MetadataT, TreeT, Out],
|
||||
Generic[KeyT, MetadataT, TreeT, Out],
|
||||
):
|
||||
def __init__(self, reduce: Reduce[KeyT, Out]):
|
||||
assert isinstance(reduce, Reduce)
|
||||
self.reduce = reduce
|
||||
|
||||
async def on_null(self, protocolized: BP) -> Out:
|
||||
assert isinstance(protocolized, BinaryProtocolized)
|
||||
return self.reduce.initial
|
||||
|
||||
async def on_split(self, case: PBS) -> Out:
|
||||
assert isinstance(case, ProtocolizedBinarySplit)
|
||||
left: Out
|
||||
key: KeyT
|
||||
right: Out
|
||||
left, key, right = await gather(
|
||||
self.on(case.protocolizedl()),
|
||||
case.split.key.resolve(),
|
||||
self.on(case.protocolizedr()),
|
||||
)
|
||||
|
||||
return await self.reduce.merge(
|
||||
await self.reduce.reduce(
|
||||
left,
|
||||
key
|
||||
),
|
||||
right,
|
||||
)
|
||||
|
||||
|
||||
class NotFound:
|
||||
def __lt__(self, other):
|
||||
if isinstance(other, NotFound):
|
||||
return False
|
||||
elif isinstance(other, Found):
|
||||
return True
|
||||
elif isinstance(other, SubTree):
|
||||
return True
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
|
||||
class Found:
|
||||
def __lt__(self, other):
|
||||
if isinstance(other, NotFound):
|
||||
return False
|
||||
elif isinstance(other, Found):
|
||||
return False
|
||||
elif isinstance(other, SubTree):
|
||||
return True
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
|
||||
class SubTree:
|
||||
def __lt__(self, other):
|
||||
if isinstance(other, NotFound):
|
||||
return False
|
||||
elif isinstance(other, Found):
|
||||
return False
|
||||
elif isinstance(other, SubTree):
|
||||
return False
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
|
||||
CheckResult: TypeAlias = NotFound | Found | SubTree
|
||||
|
||||
|
||||
class SubCheck(
|
||||
CompareAction[KeyT, MetadataT, TreeT, CheckResult],
|
||||
Generic[KeyT, MetadataT, TreeT],
|
||||
):
|
||||
def __init__(self, split: PBS):
|
||||
assert isinstance(split, ProtocolizedBinarySplit)
|
||||
self.split = split
|
||||
super().__init__(split.split.key)
|
||||
|
||||
async def on_equal(self, case: PBS, equal: Equal) -> CheckResult:
|
||||
assert isinstance(case, ProtocolizedBinarySplit)
|
||||
assert isinstance(equal, Equal)
|
||||
if case.tree == self.split.tree:
|
||||
return SubTree()
|
||||
else:
|
||||
return Found()
|
||||
|
||||
async def on_left(self, case: PBS) -> CheckResult:
|
||||
assert isinstance(case, ProtocolizedBinarySplit)
|
||||
return await self.on(case.protocolizedl())
|
||||
|
||||
async def on_right(self, case: PBS) -> CheckResult:
|
||||
assert isinstance(case, ProtocolizedBinarySplit)
|
||||
return await self.on(case.protocolizedr())
|
||||
|
||||
async def on_null(self, protocolized: BP) -> CheckResult:
|
||||
assert isinstance(protocolized, BinaryProtocolized)
|
||||
return NotFound()
|
||||
|
||||
|
||||
class SubCheckMapper(
|
||||
Mapper[BP, CheckResult],
|
||||
Generic[KeyT, MetadataT, TreeT],
|
||||
):
|
||||
def __init__(self, split: PBS):
|
||||
assert isinstance(split, ProtocolizedBinarySplit)
|
||||
self.split = split
|
||||
|
||||
async def map(self, element: BP) -> CheckResult:
|
||||
assert isinstance(element, BinaryProtocolized)
|
||||
return await SubCheck(self.split).on(element)
|
||||
|
||||
def loose(self) -> Mapper[BP, CheckResult]:
|
||||
return self
|
||||
|
||||
|
||||
class VerifySubsetAction(
|
||||
BinaryAction[KeyT, MetadataT, TreeT, bool],
|
||||
Generic[KeyT, MetadataT, TreeT],
|
||||
):
|
||||
def __init__(self, reducer: Reducer[BP, CheckResult]):
|
||||
assert isinstance(reducer, Reducer)
|
||||
self.reducer = reducer
|
||||
|
||||
async def on_null(self, protocolized: BP) -> bool:
|
||||
assert isinstance(protocolized, BinaryProtocolized)
|
||||
return True
|
||||
|
||||
async def on_split(self, case: PBS) -> bool:
|
||||
assert isinstance(case, ProtocolizedBinarySplit)
|
||||
reduce: Reduce[BP, CheckResult] = MapReduce(
|
||||
SubCheckMapper(case).loose(), VerifySubsetReduce(NotFound())
|
||||
).loose()
|
||||
assert isinstance(reduce, Reduce)
|
||||
result: CheckResult = await self.reducer.reduce(reduce)
|
||||
match result:
|
||||
case SubTree():
|
||||
return True
|
||||
case Found():
|
||||
assert_trues(
|
||||
await gather(
|
||||
self.on(case.protocolizedl()),
|
||||
self.on(case.protocolizedr()),
|
||||
)
|
||||
)
|
||||
return True
|
||||
case NotFound():
|
||||
raise ValueError('subset check failed')
|
||||
case _:
|
||||
raise TypeError
|
||||
|
||||
|
||||
class VerifySubsetReduce(
|
||||
PureReduce[CheckResult]
|
||||
):
|
||||
async def merge(self, left: CheckResult, right: CheckResult) -> CheckResult:
|
||||
return max(left, right)
|
12
rainbowadn/flow13/_flowbank.py
Normal file
12
rainbowadn/flow13/_flowbank.py
Normal file
@ -0,0 +1,12 @@
|
||||
from ._flowstandard import *
|
||||
from ._flowtransaction import *
|
||||
|
||||
__all__ = ('FlowBank',)
|
||||
|
||||
|
||||
class FlowBank:
|
||||
async def minted(self) -> FlowStandard[FlowCoin]:
|
||||
raise NotImplementedError
|
||||
|
||||
async def used(self) -> FlowStandard[FlowCoin]:
|
||||
raise NotImplementedError
|
121
rainbowadn/flow13/_flowbankverification.py
Normal file
121
rainbowadn/flow13/_flowbankverification.py
Normal file
@ -0,0 +1,121 @@
|
||||
from rainbowadn.core import *
|
||||
from rainbowadn.flow.bridge import *
|
||||
from rainbowadn.flow.primitive import *
|
||||
from rainbowadn.flow.verification.core import *
|
||||
from rainbowadn.nullability import *
|
||||
from ._flowbank import *
|
||||
from ._flowcheque import *
|
||||
from ._flowstandard import *
|
||||
from ._flowtransaction import *
|
||||
|
||||
__all__ = ('FlowBankVerification',)
|
||||
|
||||
|
||||
class FlowBankVerification(
    Verification[tuple[Nullable[FlowBank], FlowCheque, FlowBank]],
):
    """Verify a bank-state transition ``previous --cheque--> bank``."""

    def __init__(self, initial: FlowBank):
        # `initial` stands in for a null previous state (genesis).
        assert isinstance(initial, FlowBank)
        self.initial = initial

    @classmethod
    async def _verify_disjoint_union(
            cls,
            per_previous: FlowStandard[FlowCoin],
            per_cheque: FlowStandard[FlowCoin],
            per_bank: FlowStandard[FlowCoin],
    ) -> bool:
        """Verify ``per_bank`` is the disjoint union of the other two sets.

        Checked as: each part is a subset of the whole, the whole is a
        subset of the parts combined, and the parts do not intersect.
        """
        assert isinstance(per_previous, FlowStandard)
        assert isinstance(per_cheque, FlowStandard)
        assert isinstance(per_bank, FlowStandard)

        async def verify_no_intersection():
            assert_true(await per_previous.verify_does_not_intersect(await per_cheque.reducer()))
            return True

        assert_trues(
            await gather(
                per_previous.verify_subset(UnitReducer(per_bank)),
                per_cheque.verify_subset(UnitReducer(per_bank)),
                per_bank.verify_subset(ListBridge([per_previous, per_cheque])),
                verify_no_intersection(),
            )
        )

        return True

    @classmethod
    async def _verify(
            cls,
            previous: FlowBank,
            cheque: FlowCheque,
            bank: FlowBank,
    ) -> bool:
        """Verify the cheque and that ``bank`` extends ``previous`` by
        exactly the cheque's minted and used coin sets."""
        assert isinstance(previous, FlowBank)
        assert isinstance(cheque, FlowCheque)
        assert isinstance(bank, FlowBank)

        async def verify_unique_minted():
            # bank.minted must be previous.minted ⊎ cheque.minted
            previous_minted: FlowStandard[FlowCoin] = await previous.minted()
            cheque_minted: FlowStandard[FlowCoin] = await cheque.minted()
            bank_minted: FlowStandard[FlowCoin] = await bank.minted()
            assert_true(
                await cls._verify_disjoint_union(
                    previous_minted,
                    cheque_minted,
                    bank_minted,
                )
            )
            return True

        async def verify_unique_used():
            # bank.used must be previous.used ⊎ cheque.used
            previous_used: FlowStandard[FlowCoin] = await previous.used()
            cheque_used: FlowStandard[FlowCoin] = await cheque.used()
            bank_used: FlowStandard[FlowCoin] = await bank.used()
            assert_true(
                await cls._verify_disjoint_union(
                    previous_used,
                    cheque_used,
                    bank_used,
                )
            )
            return True

        async def verify_used_were_minted():
            # every coin the cheque spends must exist in the new minted set
            bank_minted: FlowStandard[FlowCoin] = await bank.minted()
            cheque_used: FlowStandard[FlowCoin] = await cheque.used()
            assert_true(
                await cheque_used.verify_subset(UnitReducer(bank_minted))
            )
            return True

        assert_trues(
            await gather(
                cheque.verify(),
                verify_unique_minted(),
                verify_unique_used(),
                verify_used_were_minted(),
            )
        )
        return True

    async def verify(
            self,
            element: tuple[Nullable[FlowBank], FlowCheque, FlowBank]
    ) -> bool:
        """Unpack the transition triple and delegate to ``_verify``.

        A null ``previous`` is replaced with the configured initial bank.
        """
        assert isinstance(element, tuple)
        previous: Nullable[FlowBank]
        cheque: FlowCheque
        bank: FlowBank
        previous, cheque, bank = element
        assert isinstance(previous, Nullable)
        assert isinstance(cheque, FlowCheque)
        assert isinstance(bank, FlowBank)
        previous_bank: FlowBank
        if previous.null():
            previous_bank = self.initial
        else:
            # NOTE(review): `Nullable.resolve()` is called without await —
            # the isinstance check below would fail on a coroutine, so it
            # appears synchronous; confirm against the Nullable API.
            previous_bank = previous.resolve()
        assert isinstance(previous_bank, FlowBank)
        assert_true(await self._verify(previous_bank, cheque, bank))
        return True
|
286
rainbowadn/flow13/_flowcheque.py
Normal file
286
rainbowadn/flow13/_flowcheque.py
Normal file
@ -0,0 +1,286 @@
|
||||
from rainbowadn.collection.keyvalue import *
|
||||
from rainbowadn.core import *
|
||||
from rainbowadn.flow.core import *
|
||||
from rainbowadn.flow.verification.core import *
|
||||
from ._flowstandard import *
|
||||
from ._flowtransaction import *
|
||||
|
||||
__all__ = ('FlowCheque',)
|
||||
|
||||
|
||||
class SumReduce(PureReduce[int]):
    """Pure reduction adding integer values."""

    async def merge(self, left: int, right: int) -> int:
        total = left + right
        return total
|
||||
|
||||
|
||||
class ValueMapper(Mapper[FlowCoin, int]):
    """Map a coin to its integer value."""

    async def map(self, element: FlowCoin) -> int:
        assert isinstance(element, FlowCoin)
        value = await element.int_value()
        return value
|
||||
|
||||
|
||||
class FlowCheque:
    """Abstract cheque: a batch of transactions plus the coins they mint and use.

    Concrete subclasses supply the underlying collections; this base class
    implements value accounting (``extra``) and self-verification on top.
    """

    async def transactions(self) -> FlowStandard[FlowTransaction]:
        """Transactions included in this cheque."""
        raise NotImplementedError

    async def minted(self) -> FlowStandard[FlowCoin]:
        """Coins minted by this cheque."""
        raise NotImplementedError

    async def used(self) -> FlowStandard[FlowCoin]:
        """Coins consumed by this cheque."""
        raise NotImplementedError

    async def usedx(self) -> FlowStandard[KeyValue[FlowCoin, FlowTransaction]]:
        """Consumed coins keyed with the transaction that consumed them."""
        raise NotImplementedError

    async def mint(self) -> int:
        """Amount of new value this cheque may create."""
        raise NotImplementedError

    @classmethod
    async def total_of(cls, tree: FlowStandard[FlowCoin]) -> int:
        """Sum the integer values of every coin in ``tree``."""
        assert isinstance(tree, FlowStandard)
        # annotation fixed: `Reducer` takes two type parameters (element
        # type, reduction result type) everywhere else in this module.
        reducer: Reducer[FlowCoin, int] = await tree.reducer()
        assert isinstance(reducer, Reducer)
        total: int = await reducer.reduce(MapReduce(ValueMapper(), SumReduce(0)))
        assert isinstance(total, int)
        return total

    async def total_minted(self) -> int:
        """Total value of minted coins."""
        return await self.total_of(await self.minted())

    async def total_used(self) -> int:
        """Total value of consumed coins."""
        return await self.total_of(await self.used())

    async def extra(self) -> int:
        """Residual value: ``mint() + total_minted() - total_used()``."""
        mint: int
        total_minted: int
        total_used: int
        mint, total_minted, total_used = await gather(
            self.mint(),
            self.total_minted(),
            self.total_used(),
        )
        assert isinstance(mint, int)
        assert isinstance(total_minted, int)
        assert isinstance(total_used, int)
        return mint + total_minted - total_used

    async def _verify_extra(self) -> bool:
        # the residual value must be non-negative
        assert (await self.extra()) >= 0
        return True

    async def _verify_transactions(self) -> bool:
        """Every transaction passes `TransactionVerification`."""
        assert_true(
            await (await self.transactions()).verify(TransactionVerification(self).loose())
        )
        return True

    async def _verify_minted(self) -> bool:
        """Every minted coin passes `MintedVerification`."""
        assert_true(
            await (await self.minted()).verify(MintedVerification(self).loose())
        )
        return True

    async def _verify_used(self) -> bool:
        """Every used coin passes `UsedVerification`."""
        assert_true(
            await (await self.used()).verify(UsedVerification(self).loose())
        )
        return True

    async def _verify_usedx(self) -> bool:
        """Every usedx entry passes `UsedXVerification`."""
        assert_true(
            await (await self.usedx()).verify(UsedXVerification(self).loose())
        )
        return True

    async def verify(self) -> bool:
        """Run all internal consistency checks concurrently."""
        assert_trues(
            await gather(
                self._verify_extra(),
                self._verify_transactions(),
                self._verify_minted(),
                self._verify_used(),
                self._verify_usedx(),
            )
        )
        return True
|
||||
|
||||
|
||||
class TransactionVerification(
    Verification[FlowTransaction]
):
    """Verify one transaction's coins are consistently recorded in a cheque."""

    def __init__(self, cheque: FlowCheque):
        assert isinstance(cheque, FlowCheque)
        self.cheque = cheque

    @classmethod
    def usedx_reducer(cls, reducer: Reducer[FlowCoin, bool], transaction: FlowTransaction):
        """Lift a reducer over coins into one over (coin, transaction) pairs."""
        assert isinstance(reducer, Reducer)
        assert isinstance(transaction, FlowTransaction)

        def usedx(coin: FlowCoin) -> KeyValue[FlowCoin, FlowTransaction]:
            assert isinstance(coin, FlowCoin)
            return KeyValue(
                HashPoint.of(coin),
                HashPoint.of(transaction)
            )

        usedx_reducer: Reducer[KeyValue[FlowCoin, FlowTransaction], bool] = MapReducer(
            CallableMapper(usedx),
            reducer
        )
        assert isinstance(usedx_reducer, Reducer)
        return usedx_reducer

    async def _verify_transaction_minted(self, transaction: FlowTransaction) -> bool:
        """All coins the transaction mints appear in the cheque's minted set."""
        assert isinstance(transaction, FlowTransaction)
        minted: FlowStandard[FlowCoin]
        minted_reducer: Reducer[FlowCoin, bool]
        minted, minted_reducer = await gather(
            self.cheque.minted(),
            transaction.minted_reducer(),
        )
        assert isinstance(minted, FlowStandard)
        assert isinstance(minted_reducer, Reducer)
        assert_true(
            await minted.verify_contains_all(
                minted_reducer
            )
        )
        return True

    async def _verify_transaction_used(self, transaction: FlowTransaction) -> bool:
        """All coins the transaction consumes appear in the cheque's used set."""
        assert isinstance(transaction, FlowTransaction)
        # fixed: the declared locals are the *used* pair — the original
        # annotated them as minted/minted_reducer (copy-paste slip).
        used: FlowStandard[FlowCoin]
        used_reducer: Reducer[FlowCoin, bool]
        used, used_reducer = await gather(
            self.cheque.used(),
            transaction.used_reducer(),
        )
        assert isinstance(used, FlowStandard)
        assert isinstance(used_reducer, Reducer)
        assert_true(
            await used.verify_contains_all(
                used_reducer
            )
        )
        return True

    async def _verify_transaction_usedx(self, transaction: FlowTransaction) -> bool:
        """Each consumed coin is recorded in usedx against this transaction."""
        # fixed: argument check added for consistency with the sibling methods
        assert isinstance(transaction, FlowTransaction)
        # fixed: locals renamed from minted/minted_reducer to match what
        # is actually assigned below.
        usedx: FlowStandard[KeyValue[FlowCoin, FlowTransaction]]
        used_reducer: Reducer[FlowCoin, bool]
        usedx, used_reducer = await gather(
            self.cheque.usedx(),
            transaction.used_reducer(),
        )
        assert isinstance(usedx, FlowStandard)
        assert isinstance(used_reducer, Reducer)
        assert_true(
            await usedx.verify_contains_all(
                self.usedx_reducer(used_reducer, transaction)
            )
        )
        return True

    async def verify(self, element: FlowTransaction) -> bool:
        """Run the three per-transaction checks concurrently."""
        assert isinstance(element, FlowTransaction)
        assert_trues(
            await gather(
                self._verify_transaction_minted(element),
                self._verify_transaction_used(element),
                self._verify_transaction_usedx(element),
            )
        )
        return True

    def loose(self) -> Verification[FlowTransaction]:
        """Expose through the general `Verification` interface."""
        return self
|
||||
|
||||
|
||||
class MintedVerification(
    Verification[FlowCoin]
):
    """Verify a minted coin's originating transaction belongs to the cheque."""

    def __init__(self, cheque: FlowCheque):
        assert isinstance(cheque, FlowCheque)
        self.cheque = cheque

    async def verify(self, element: FlowCoin) -> bool:
        """Resolve the coin's minting transaction and check membership."""
        assert isinstance(element, FlowCoin)
        transactions: FlowStandard[FlowTransaction]
        transaction: FlowTransaction
        transactions, transaction = await gather(
            self.cheque.transactions(),
            element.transaction.resolve(),
        )
        assert isinstance(transactions, FlowStandard)
        assert isinstance(transaction, FlowTransaction)
        assert_true(
            await transactions.contains(
                transaction
            )
        )
        return True

    def loose(self) -> Verification[FlowCoin]:
        """Expose through the general `Verification` interface."""
        return self
|
||||
|
||||
|
||||
class UsedVerification(
    Verification[FlowCoin]
):
    """Verify a used coin is present in the cheque's usedx index.

    The value half of the probe pair is irrelevant to the lookup, so an
    empty transaction is substituted.
    """

    def __init__(self, cheque: FlowCheque):
        assert isinstance(cheque, FlowCheque)
        self.cheque = cheque

    async def verify(self, element: FlowCoin) -> bool:
        assert isinstance(element, FlowCoin)
        usedx = await self.cheque.usedx()
        probe = KeyValue(HashPoint.of(element), HashPoint.of(FlowTransaction.empty()))
        assert_true(await usedx.contains(probe))
        return True

    def loose(self) -> Verification[FlowCoin]:
        """Expose through the general `Verification` interface."""
        return self
|
||||
|
||||
|
||||
class UsedXVerification(
    Verification[KeyValue[FlowCoin, FlowTransaction]]
):
    """Verify one usedx entry: the recorded transaction is part of the
    cheque and actually consumes the recorded coin."""

    def __init__(self, cheque: FlowCheque):
        assert isinstance(cheque, FlowCheque)
        self.cheque = cheque

    async def _verify_transaction(self, transaction: FlowTransaction) -> bool:
        """The consuming transaction must itself belong to the cheque."""
        assert isinstance(transaction, FlowTransaction)
        assert_true(
            await (await self.cheque.transactions()).contains(
                transaction
            )
        )
        return True

    @classmethod
    async def _verify_transaction_contains(cls, transaction: FlowTransaction, coin: HashPoint[FlowCoin]) -> bool:
        """Check ``coin`` is among the transaction's input coins.

        Raises:
            ValueError: if the coin is not an input of the transaction.
        """
        assert isinstance(transaction, FlowTransaction)
        assert isinstance(coin, HashPoint)
        in_coin: HashPoint[FlowCoin]
        async for in_coin in (await transaction.data_resolved()).iter_in_coins():
            if in_coin == coin:
                return True
        raise ValueError('used coin not found in transaction')

    async def verify(self, element: KeyValue[FlowCoin, FlowTransaction]) -> bool:
        """Run both checks concurrently for one (coin, transaction) pair."""
        assert isinstance(element, KeyValue)
        transaction: FlowTransaction = await element.value.resolve()
        assert isinstance(transaction, FlowTransaction)
        assert_trues(
            await gather(
                self._verify_transaction(transaction),
                self._verify_transaction_contains(transaction, element.key),
            )
        )
        return True

    def loose(self) -> Verification[KeyValue[FlowCoin, FlowTransaction]]:
        """Expose through the general `Verification` interface."""
        return self
|
45
rainbowadn/flow13/_flowstandard.py
Normal file
45
rainbowadn/flow13/_flowstandard.py
Normal file
@ -0,0 +1,45 @@
|
||||
from typing import Any, Generic, TypeAlias, TypeVar
|
||||
|
||||
from rainbowadn.atomic import *
|
||||
from rainbowadn.collection.trees.binary import *
|
||||
from rainbowadn.collection.trees.binary.actions import *
|
||||
from rainbowadn.collection.trees.binary.core import *
|
||||
from rainbowadn.core import *
|
||||
from rainbowadn.flow.core import *
|
||||
from ._binaryflow import *
|
||||
from ._flowtree import *
|
||||
|
||||
__all__ = ('FlowStandard',)
|
||||
|
||||
KeyT = TypeVar('KeyT')
|
||||
FS: TypeAlias = 'FlowStandard[KeyT]'
|
||||
ABT: TypeAlias = 'ActiveBinaryTree[KeyT, Integer]'
|
||||
BP: TypeAlias = 'BinaryProtocolized[KeyT, Integer, ABT]'
|
||||
|
||||
|
||||
class FlowStandard(
    FlowTree[KeyT, FS],
    Generic[KeyT]
):
    """`FlowTree` backed by a protocolized active binary tree."""

    def __init__(self, protocolized: BP):
        # protocolized: the binary tree paired with its traversal protocol
        assert isinstance(protocolized, BinaryProtocolized)
        self.protocolized = protocolized

    async def contains(self, key: KeyT) -> bool:
        """Membership test via a binary-tree search action."""
        return await ContainsAction(key).on(self.protocolized)

    def _protocolized(self: FS) -> BP:
        # plain accessor, used as the mapper target below
        return self.protocolized

    @classmethod
    def _protocolized_mapper(cls) -> Mapper[FS, BP]:
        return CallableMapper(cls._protocolized)

    async def verify_subset(self, trees: Reducer[FS, CheckResult]) -> bool:
        """Verify this tree's keys are a subset of the union of ``trees``.

        The underlying action raises if some key is missing.
        """
        assert isinstance(trees, Reducer)
        reducer: Reducer[BP, CheckResult] = MapReducer(self._protocolized_mapper(), trees)
        assert_true(await VerifySubsetAction(reducer).on(self.protocolized))
        return True

    async def reducer(self) -> Reducer[KeyT, Any]:
        """Reducer over all keys of this tree."""
        return BinaryReducer(self.protocolized).loose()
|
333
rainbowadn/flow13/_flowtransaction.py
Normal file
333
rainbowadn/flow13/_flowtransaction.py
Normal file
@ -0,0 +1,333 @@
|
||||
from typing import Any, AsyncIterable, Iterable
|
||||
|
||||
import nacl.signing
|
||||
|
||||
from rainbowadn.atomic import *
|
||||
from rainbowadn.collection.linear import *
|
||||
from rainbowadn.core import *
|
||||
from rainbowadn.flow.bridge import *
|
||||
from rainbowadn.flow.core import *
|
||||
from rainbowadn.nullability import *
|
||||
from rainbowadn.v13 import *
|
||||
|
||||
__all__ = ('FlowCoinData', 'FlowCoin', 'FlowTransactionData', 'FlowTransaction',)
|
||||
|
||||
|
||||
class FlowCoinData(RecursiveMentionable, StaticMentionable):
    """Immutable coin payload: owner subject and integer value."""

    def __init__(
            self,
            owner: HashPoint[Subject],
            value: HashPoint[Integer]
    ):
        assert isinstance(owner, HashPoint)
        assert isinstance(value, HashPoint)
        self.owner = owner
        self.value = value

    async def int_value(self) -> int:
        """Resolve and return the plain integer value."""
        return (await self.value.resolve()).integer

    @classmethod
    def of(cls, owner: Subject, value: int) -> 'FlowCoinData':
        """Convenience constructor from unwrapped values."""
        assert isinstance(owner, Subject)
        assert isinstance(value, int)
        return cls(HashPoint.of(owner), HashPoint.of(Integer(value)))

    def points(self) -> Iterable[HashPoint]:
        return [self.owner, self.value]

    def __bytes__(self):
        # serialization: owner hash followed by value hash
        return bytes(self.owner) + bytes(self.value)

    @classmethod
    def from_bytes(cls, source: bytes, resolver: HashResolver) -> 'FlowCoinData':
        """Deserialize from two concatenated hash points (owner, value)."""
        assert isinstance(source, bytes)
        assert isinstance(resolver, HashResolver)
        return cls(
            ResolverOrigin(Subject.factory(), source[:HashPoint.HASH_LENGTH], resolver).hash_point(),
            ResolverOrigin(Integer.factory(), source[HashPoint.HASH_LENGTH:], resolver).hash_point(),
        )

    async def str(self, tab: int) -> str:
        """Pretty-print owner and value at indentation level ``tab``."""
        assert isinstance(tab, int)
        owner_str, value_str = await gather(
            hash_point_format(self.owner, tab),
            hash_point_format(self.value, tab),
        )
        assert isinstance(owner_str, str)
        assert isinstance(value_str, str)
        return f'{owner_str}' \
               f'{tabulate(tab)}{value_str}'
|
||||
|
||||
|
||||
class FlowCoin(RecursiveMentionable, StaticMentionable):
    """A coin instance: payload plus provenance (minting transaction, index)."""

    def __init__(
            self,
            data: HashPoint[FlowCoinData],
            transaction: HashPoint['FlowTransaction'],
            index: HashPoint[Integer]
    ):
        assert isinstance(data, HashPoint)
        assert isinstance(transaction, HashPoint)
        assert isinstance(index, HashPoint)
        self.data = data
        # transaction that minted this coin (see FlowTransaction.iter_coins)
        self.transaction = transaction
        # position among that transaction's outputs
        self.index = index

    async def data_resolved(self) -> FlowCoinData:
        return await self.data.resolve()

    def points(self) -> Iterable[HashPoint]:
        return [self.data, self.transaction, self.index]

    def __bytes__(self):
        # serialization: data | transaction | index hashes, concatenated
        return bytes(self.data) + bytes(self.transaction) + bytes(self.index)

    @classmethod
    def from_bytes(cls, source: bytes, resolver: HashResolver) -> 'FlowCoin':
        """Deserialize from three concatenated hash points."""
        assert isinstance(source, bytes)
        assert isinstance(resolver, HashResolver)
        return cls(
            ResolverOrigin(FlowCoinData.factory(), source[:HashPoint.HASH_LENGTH], resolver).hash_point(),
            ResolverOrigin(
                FlowTransaction.factory(), source[HashPoint.HASH_LENGTH:2 * HashPoint.HASH_LENGTH], resolver
            ).hash_point(),
            ResolverOrigin(Integer.factory(), source[2 * HashPoint.HASH_LENGTH:], resolver).hash_point(),
        )

    async def str(self, tab: int) -> str:
        """Pretty-print the coin.

        The minting transaction is shown as the placeholder ``(origin)`` —
        presumably to avoid recursive formatting; confirm if it should be
        expanded.
        """
        assert isinstance(tab, int)
        data_str, index_str = await gather(
            hash_point_format(self.data, tab + 1),
            hash_point_format(self.index, tab + 1),
        )
        assert isinstance(data_str, str)
        assert isinstance(index_str, str)
        return f'(' \
               f'{tabulate(tab + 1)}coin' \
               f'{tabulate(tab + 1)}{data_str}' \
               f'{tabulate(tab + 1)}(origin)' \
               f'{tabulate(tab + 1)}{index_str}' \
               f'{tabulate(tab)})'

    async def int_value(self) -> int:
        """Value of this coin."""
        return await (await self.data_resolved()).int_value()

    async def owner_resolved(self) -> Subject:
        """Owner subject of this coin."""
        return await (await self.data_resolved()).owner.resolve()
|
||||
|
||||
|
||||
class FlowTransactionData(RecursiveMentionable, StaticMentionable):
    """Signable transaction payload: consumed coins and produced coin data."""

    def __init__(
            self,
            in_coins: NullableReference[Stack[FlowCoin]],
            out_coins: NullableReference[Stack[FlowCoinData]],
    ):
        assert isinstance(in_coins, NullableReference)
        assert isinstance(out_coins, NullableReference)
        self.in_coins = in_coins
        self.out_coins = out_coins
        # hash of this payload; it is what input-coin owners sign
        self.hash_point = HashPoint.of(self)
        assert isinstance(self.hash_point, HashPoint)

    def points(self) -> Iterable[HashPoint]:
        return [*self.in_coins.points(), *self.out_coins.points()]

    def __bytes__(self):
        # serialization: in-coins reference followed by out-coins reference
        return bytes(self.in_coins) + bytes(self.out_coins)

    @classmethod
    def from_bytes(cls, source: bytes, resolver: HashResolver) -> 'FlowTransactionData':
        """Deserialize: in-coins stack reference, then out-coins stack reference."""
        assert isinstance(source, bytes)
        assert isinstance(resolver, HashResolver)
        return cls(
            NullableReferenceFactory(
                StackFactory(FlowCoin.factory()).loose()
            ).from_bytes(source[:HashPoint.HASH_LENGTH], resolver),
            NullableReferenceFactory(
                StackFactory(FlowCoinData.factory()).loose()
            ).from_bytes(source[HashPoint.HASH_LENGTH:], resolver),
        )

    async def _signature_verify(self, coin: FlowCoin, signature: Signature) -> bool:
        """Check ``signature`` is the coin owner's signature of this payload."""
        assert isinstance(coin, FlowCoin)
        assert isinstance(signature, Signature)
        assert_true(
            signature.verify(
                await coin.owner_resolved(),
                self.hash_point
            )
        )
        return True

    async def _verify_signatures(
            self,
            signatures: NullableReference[Stack[Signature]]
    ) -> bool:
        """Verify one signature per input coin, pairwise in stack order.

        ``strict=True`` turns a mismatched coin/signature count into an error.
        """
        assert isinstance(signatures, NullableReference)
        assert_trues(
            await gather(
                *[
                    self._signature_verify(coin, signature)
                    for
                    coin, signature
                    in
                    zip(
                        await self.in_coins_resolved(),
                        await Stack.list(signatures),
                        strict=True
                    )
                ]
            )
        )
        return True

    def iter_in_coins(self) -> AsyncIterable[HashPoint[FlowCoin]]:
        """Iterate hash points of the input coins."""
        return Stack.iter(self.in_coins)

    async def coins(self) -> list[FlowCoin]:
        """Input coins, resolved."""
        return [await x.resolve() async for x in self.iter_in_coins()]

    async def in_coins_resolved(self) -> list[FlowCoin]:
        """Input coins, resolved via `Stack.list`."""
        return await Stack.list(self.in_coins)

    def iter_out_coins(self) -> AsyncIterable[HashPoint[FlowCoinData]]:
        """Iterate hash points of the output coin data."""
        return Stack.iter(self.out_coins)

    async def out_coins_resolved(self) -> list[FlowCoinData]:
        """Output coin data, resolved via `Stack.list`."""
        return await Stack.list(self.out_coins)

    async def verify(
            self,
            signatures: NullableReference[Stack[Signature]]
    ) -> bool:
        """Verify the given signatures authorize spending the input coins."""
        assert isinstance(signatures, NullableReference)
        assert_true(await self._verify_signatures(signatures))
        return True

    async def str(self, tab: int) -> str:
        """Pretty-print inputs and outputs at indentation level ``tab``."""
        assert isinstance(tab, int)
        in_str, out_str = await gather(
            self.in_coins.str(tab),
            self.out_coins.str(tab),
        )
        assert isinstance(in_str, str)
        assert isinstance(out_str, str)
        return f'(in)' \
               f'{tabulate(tab)}{in_str}' \
               f'{tabulate(tab)}(out)' \
               f'{tabulate(tab)}{out_str}'
|
||||
|
||||
|
||||
class FlowTransaction(RecursiveMentionable, StaticMentionable):
    """A transaction: signable payload plus the authorizing signatures."""

    def __init__(
            self,
            data: HashPoint[FlowTransactionData],
            signatures: NullableReference[Stack[Signature]]
    ):
        assert isinstance(data, HashPoint)
        assert isinstance(signatures, NullableReference)
        self.data = data
        self.signatures = signatures
        # identity hash of the whole transaction (used as coin provenance)
        self.hash_point = HashPoint.of(self)
        assert isinstance(self.hash_point, HashPoint)

    async def data_resolved(self) -> FlowTransactionData:
        return await self.data.resolve()

    def points(self) -> Iterable[HashPoint]:
        return [self.data, *self.signatures.points()]

    def __bytes__(self):
        # serialization: payload hash followed by signatures reference
        return bytes(self.data) + bytes(self.signatures)

    @classmethod
    def from_bytes(cls, source: bytes, resolver: HashResolver) -> 'FlowTransaction':
        """Deserialize: payload hash point, then signatures stack reference."""
        assert isinstance(source, bytes)
        assert isinstance(resolver, HashResolver)
        signature_factory: RainbowFactory[Signature] = Signature.factory()
        assert isinstance(signature_factory, RainbowFactory)
        stack_factory: RainbowFactory[Stack[Signature]] = StackFactory(signature_factory).loose()
        assert isinstance(stack_factory, RainbowFactory)
        return cls(
            ResolverOrigin(FlowTransactionData.factory(), source[:HashPoint.HASH_LENGTH], resolver).hash_point(),
            NullableReferenceFactory(stack_factory).from_bytes(source[HashPoint.HASH_LENGTH:], resolver),
        )

    async def iter_coins(
            self
    ) -> AsyncIterable[FlowCoin]:
        """Yield the coins minted by this transaction, indexed in output order."""
        transaction_data: FlowTransactionData = await self.data_resolved()
        assert isinstance(transaction_data, FlowTransactionData)
        index = 0
        out_coin: HashPoint[FlowCoinData]
        async for out_coin in transaction_data.iter_out_coins():
            assert isinstance(out_coin, HashPoint)
            coin: FlowCoin = FlowCoin(out_coin, self.hash_point, HashPoint.of(Integer(index)))
            assert isinstance(coin, FlowCoin)
            yield coin
            index += 1

    async def coins(
            self
    ) -> list[FlowCoin]:
        """Minted coins as a list."""
        return [coin async for coin in self.iter_coins()]

    async def used_reducer(self) -> Reducer[FlowCoin, Any]:
        """Reducer over the coins this transaction consumes."""
        transaction_data: FlowTransactionData = await self.data_resolved()
        assert isinstance(transaction_data, FlowTransactionData)
        bridge: Reducer[FlowCoin, Any] = ListBridge(await transaction_data.coins())
        assert isinstance(bridge, Reducer)
        return bridge

    async def minted_reducer(self) -> Reducer[FlowCoin, Any]:
        """Reducer over the coins this transaction mints."""
        bridge: Reducer[FlowCoin, Any] = ListBridge(await self.coins())
        assert isinstance(bridge, Reducer)
        return bridge

    async def verify(self):
        """Verify the signatures against the payload."""
        data: FlowTransactionData = await self.data_resolved()
        assert isinstance(data, FlowTransactionData)
        assert_true(await data.verify(self.signatures))
        return True

    async def str(self, tab: int) -> str:
        """Pretty-print payload and signatures at indentation level ``tab``."""
        assert isinstance(tab, int)
        data_str, signatures_str = await gather(
            hash_point_format(self.data, tab + 1),
            self.signatures.str(tab + 1),
        )
        assert isinstance(data_str, str)
        assert isinstance(signatures_str, str)
        return f'(' \
               f'{tabulate(tab + 1)}transaction' \
               f'{tabulate(tab + 1)}{data_str}' \
               f'{tabulate(tab + 1)}{signatures_str}' \
               f'{tabulate(tab)})'

    @classmethod
    def make(
            cls,
            in_coins: list[FlowCoin],
            out_coins: list[FlowCoinData],
            keys: list[nacl.signing.SigningKey],
    ) -> 'FlowTransaction':
        """Build and sign a transaction; ``keys`` must match ``in_coins`` order.

        Inputs are reversed before stacking — presumably so that stack
        iteration yields them back in the original order; confirm against
        ``Stack.off`` semantics.
        """
        assert isinstance(in_coins, list)
        assert isinstance(out_coins, list)
        assert isinstance(keys, list)
        transaction_data = FlowTransactionData(
            Stack.off(FlowCoin.factory(), reversed(in_coins)),
            Stack.off(FlowCoinData.factory(), reversed(out_coins)),
        )
        assert isinstance(transaction_data, FlowTransactionData)
        return FlowTransaction(
            HashPoint.of(transaction_data),
            Stack.off(
                Signature.factory(),
                (Signature.sign(key, HashPoint.of(transaction_data)) for key in reversed(keys))
            )
        )

    @classmethod
    def empty(cls):
        """Transaction with no inputs, outputs, or signatures."""
        return cls.make([], [], [])
|
66
rainbowadn/flow13/_flowtree.py
Normal file
66
rainbowadn/flow13/_flowtree.py
Normal file
@ -0,0 +1,66 @@
|
||||
from typing import Any, Generic, TypeVar
|
||||
|
||||
from rainbowadn.core import *
|
||||
from rainbowadn.flow.core import *
|
||||
from rainbowadn.flow.verification.core import *
|
||||
|
||||
__all__ = ('FlowTree',)
|
||||
|
||||
Key = TypeVar('Key')
|
||||
Tree = TypeVar('Tree')
|
||||
|
||||
|
||||
class FlowTree(Generic[Key, Tree]):
    """Abstract verifiable key set with containment and subset checks."""

    async def contains(self, key: Key) -> bool:
        """Whether ``key`` is present."""
        raise NotImplementedError

    async def verify_contains_all(self, keys: Reducer[Key, bool]) -> bool:
        """Verify every key produced by ``keys`` is contained here."""
        assert isinstance(keys, Reducer)
        key_verification: Verification[Key] = ContainsVerification(self)
        assert isinstance(key_verification, Verification)
        assert_true(await ReduceVerification(key_verification).loose().verify(keys))
        return True

    async def verify_does_not_intersect(self, keys: Reducer[Key, bool]) -> bool:
        """Verify no key produced by ``keys`` is contained here."""
        assert isinstance(keys, Reducer)
        key_verification: Verification[Key] = DoesNotContainVerification(self)
        assert isinstance(key_verification, Verification)
        assert_true(await ReduceVerification(key_verification).loose().verify(keys))
        return True

    async def verify_subset(self: Tree, trees: Reducer['FlowTree[Key, Tree]', Any]) -> bool:
        """Verify this tree's keys are a subset of the union of ``trees``."""
        raise NotImplementedError

    async def reducer(self) -> Reducer[Key, Any]:
        """Reducer over all keys of this tree."""
        raise NotImplementedError

    async def verify(self, verification: Verification[Key]) -> bool:
        """Apply ``verification`` to every key of this tree."""
        assert isinstance(verification, Verification)
        assert_true(
            await ReduceVerification(
                verification
            ).loose().verify(
                await self.reducer()
            )
        )
        return True
|
||||
|
||||
|
||||
class ContainsVerification(Verification[Key], Generic[Key, Tree]):
    """Succeeds for elements the wrapped tree contains."""

    def __init__(self, tree: FlowTree[Key, Tree]):
        assert isinstance(tree, FlowTree)
        self.tree = tree

    async def verify(self, element: Key) -> bool:
        contained = await self.tree.contains(element)
        assert_true(contained)
        return True
|
||||
|
||||
|
||||
class DoesNotContainVerification(Verification[Key], Generic[Key, Tree]):
    """Succeeds for elements the wrapped tree does not contain."""

    def __init__(self, tree: FlowTree[Key, Tree]):
        assert isinstance(tree, FlowTree)
        self.tree = tree

    async def verify(self, element: Key) -> bool:
        contained = await self.tree.contains(element)
        assert_false(contained)
        return True
|
@ -1,12 +1,18 @@
|
||||
import os
|
||||
import time
|
||||
import unittest
|
||||
|
||||
from rainbowadn.atomic import *
|
||||
from rainbowadn.collection.comparison import *
|
||||
from rainbowadn.collection.linear import *
|
||||
from rainbowadn.collection.trees.binary import *
|
||||
from rainbowadn.core import *
|
||||
from rainbowadn.flow.bridge import *
|
||||
from rainbowadn.flow.core import *
|
||||
from rainbowadn.flow.primitive import *
|
||||
from rainbowadn.flow.sequence import *
|
||||
from rainbowadn.flow.stacked import *
|
||||
from rainbowadn.flow13 import *
|
||||
from rainbowadn.nullability import *
|
||||
|
||||
|
||||
@ -31,6 +37,9 @@ class NoneReduce(Reduce[None, None]):
|
||||
async def reduce(self, out: None, element: None) -> None:
|
||||
return None
|
||||
|
||||
async def merge(self, left: None, right: None) -> None:
|
||||
return None
|
||||
|
||||
|
||||
class PrintReduce(Reduce[tuple[Nullable[HashPoint], HashPoint], None]):
|
||||
def __init__(self):
|
||||
@ -45,23 +54,60 @@ class PrintReduce(Reduce[tuple[Nullable[HashPoint], HashPoint], None]):
|
||||
)
|
||||
return None
|
||||
|
||||
async def merge(self, left: None, right: None) -> None:
|
||||
return None
|
||||
|
||||
def loose(self) -> Reduce[tuple[Nullable[HashPoint], HashPoint], None]:
|
||||
return self
|
||||
|
||||
|
||||
class TestBridge(unittest.IsolatedAsyncioTestCase):
|
||||
async def test_stack_bridge(self):
|
||||
@classmethod
|
||||
def element_of(cls, stack: Stack[Plain]) -> HashPoint[Plain]:
|
||||
return stack.element
|
||||
|
||||
@classmethod
|
||||
def element_mapper(cls) -> Mapper[Stack[Plain], HashPoint[Plain]]:
|
||||
return CallableMapper(cls.element_of)
|
||||
|
||||
@classmethod
|
||||
async def bridge(cls) -> Reducer[SequenceDispatcher[HashPoint[Plain], None], None]:
|
||||
hp: HashPoint[Stack[Plain]] = Stack.off(Plain.factory(), [Plain(b'A'), Plain(b'B'), Plain(b'C')]).hashpoint()
|
||||
bridge = StackBridge(hp)
|
||||
print(await hash_point_format(hp, 0))
|
||||
bridge = StackBridge(hp).over_elements()
|
||||
return bridge
|
||||
|
||||
async def test_stack_bridge(self):
|
||||
set_gather_linear()
|
||||
bridge = await self.bridge()
|
||||
assert_none_strict(
|
||||
await bridge.reduce(MapReduce(DispatchMapper(PrintDispatch().loose()).loose(), NoneReduce()).loose())
|
||||
)
|
||||
|
||||
async def test_stacked(self):
|
||||
hp: HashPoint[Stack[Plain]] = Stack.off(Plain.factory(), [Plain(b'A'), Plain(b'B'), Plain(b'C')]).hashpoint()
|
||||
bridge = StackBridge(hp)
|
||||
print(await hash_point_format(hp, 0))
|
||||
set_gather_linear()
|
||||
bridge = await self.bridge()
|
||||
assert_none_strict(
|
||||
await StackedReducer(bridge.loose()).loose().reduce(PrintReduce().loose())
|
||||
await StackedReducer(bridge).loose().reduce(PrintReduce().loose())
|
||||
)
|
||||
|
||||
@classmethod
|
||||
async def abt_of(cls, *plains: bytes) -> ActiveBinaryTree[Plain, Integer]:
|
||||
abt: ActiveBinaryTree[Plain, Integer] = ActiveBinaryTree.empty(AVL(PlainComparator(Fail())), Plain.factory())
|
||||
for plain in plains:
|
||||
abt = await abt.add(HashPoint.of(Plain(plain)))
|
||||
return abt
|
||||
|
||||
async def test_flowstandard(self):
|
||||
set_gather_linear()
|
||||
set0 = {os.urandom(8).hex().encode() for _ in range(64)}
|
||||
abt0: ActiveBinaryTree[Plain, Integer] = await self.abt_of(*set0)
|
||||
abt1: ActiveBinaryTree[Plain, Integer] = await abt0.add(HashPoint.of(Plain(os.urandom(8).hex().encode())))
|
||||
fs0 = FlowStandard(abt0.protocolized())
|
||||
fs1 = FlowStandard(abt1.protocolized())
|
||||
_t = time.process_time()
|
||||
await fs0.verify_subset(UnitReducer(fs1))
|
||||
with self.assertWarns(RuntimeWarning):
|
||||
with self.assertRaises(ValueError):
|
||||
await fs1.verify_subset(UnitReducer(fs0))
|
||||
print('verification time', time.process_time() - _t)
|
||||
|
Loading…
Reference in New Issue
Block a user