flow trace

parent 86f460989e
commit d72c714f81

plot.py (4 changed lines)
@@ -23,7 +23,7 @@ def plot(fn: str):
     plt.ylabel('concurrency (1)')

     with open(fn) as file:
-        jsonified: dict = json.load(file)
+        jsonified: dict[str] = json.load(file)

     def logplot(plot_function, metric: str, **kwargs):
         if (log := jsonified.pop(metric, None)) is not None:
@@ -35,11 +35,13 @@ def plot(fn: str):
     logplot(plt.plot, 'Stack:list:concurrency')
     logplot(plt.scatter, 'ActiveBinaryTree:add:entry', c='tomato', zorder=100, s=.5)
     logplot(plt.scatter, 'ActiveBinaryTree:add:exit', c='gold', zorder=99, s=.5)

     plt.legend()
     plt.show()


 if __name__ == '__main__':
+    if Path('trace/latest.json').exists():
         plot('trace/latest.json')
     for fp in list(Path('trace').glob('*.json')):
         if fp != Path('trace/latest.json'):
@@ -1,11 +1,11 @@
 __all__ = (
     'BinaryAction',
     'CompareAction',
-    'AddAction', 'RemoveAction', 'ContainsAction',
+    'AddAction', 'RemoveAction', 'ContainsAction', 'SplitAction', 'UnionAction',
     'Symmetric', 'InnerOuter', 'OuterInner',
 )

 from .binaryaction import BinaryAction
 from .compareaction import CompareAction
-from .stdactions import AddAction, ContainsAction, RemoveAction
+from .stdactions import AddAction, ContainsAction, RemoveAction, SplitAction, UnionAction
 from .symmetric import InnerOuter, OuterInner, Symmetric
@@ -2,10 +2,11 @@ from typing import Generic, TypeVar

 from rainbowadn.collection.comparison import *
 from rainbowadn.collection.trees.binary.core import *
+from rainbowadn.core import *
 from .binaryaction import *
 from .compareaction import *

-__all__ = ('AddAction', 'RemoveAction', 'ContainsAction',)
+__all__ = ('AddAction', 'RemoveAction', 'ContainsAction', 'SplitAction', 'UnionAction',)

 TreeType = TypeVar('TreeType')
 ActiveKeyType = TypeVar('ActiveKeyType')
@@ -180,3 +181,67 @@ class ContainsAction(
     ) -> bool:
         assert isinstance(protocolized, BinaryProtocolized)
         return False
+
+
+class SplitAction(
+    CompareAction[
+        ActiveKeyType,
+        MetaDataType,
+        TreeType,
+        tuple[TreeType, TreeType]
+    ],
+    Generic[ActiveKeyType, MetaDataType, TreeType]
+):
+    async def on_equal(
+        self, case: ProtocolizedBinarySplit[ActiveKeyType, MetaDataType, TreeType], equal: Equal
+    ) -> tuple[TreeType, TreeType]:
+        if isinstance(equal, Replace):
+            return case.split.treel, case.split.treer
+        else:
+            raise TypeError
+
+    async def on_left(
+        self, case: ProtocolizedBinarySplit[ActiveKeyType, MetaDataType, TreeType]
+    ) -> tuple[TreeType, TreeType]:
+        ll, lr = await self.on(case.protocolizedl())
+        return ll, await case.protocol.tree(lr, case.split.treer, case.split.key)
+
+    async def on_right(
+        self, case: ProtocolizedBinarySplit[ActiveKeyType, MetaDataType, TreeType]
+    ) -> tuple[TreeType, TreeType]:
+        rl, rr = await self.on(case.protocolizedr())
+        return await case.protocol.tree(case.split.treel, rl, case.split.key), rr
+
+    async def on_null(
+        self, protocolized: BinaryProtocolized[ActiveKeyType, MetaDataType, TreeType]
+    ) -> tuple[TreeType, TreeType]:
+        return protocolized.tree, protocolized.tree
+
+
+class UnionAction(
+    BinaryAction[
+        ActiveKeyType,
+        MetaDataType,
+        TreeType,
+        TreeType
+    ],
+    Generic[ActiveKeyType, MetaDataType, TreeType]
+):
+    def __init__(self, protocolized: BinaryProtocolized[ActiveKeyType, MetaDataType, TreeType]):
+        assert isinstance(protocolized, BinaryProtocolized)
+        self.protocolized = protocolized
+
+    async def on_null(self, protocolized: BinaryProtocolized[ActiveKeyType, MetaDataType, TreeType]) -> TreeType:
+        return self.protocolized.tree
+
+    async def on_split(self, case: ProtocolizedBinarySplit[ActiveKeyType, MetaDataType, TreeType]) -> TreeType:
+        treel: TreeType
+        treer: TreeType
+        treel, treer = await SplitAction(case.split.key).on(self.protocolized)
+        unionl: TreeType
+        unionr: TreeType
+        unionl, unionr = await gather(
+            UnionAction(case.protocolizedl()).on(BinaryProtocolized(case.protocol, treel)),
+            UnionAction(case.protocolizedr()).on(BinaryProtocolized(case.protocol, treer)),
+        )
+        return await case.protocol.tree(unionl, unionr, case.split.key)
@@ -82,6 +82,17 @@ class ActiveBinaryTree(
         assert isinstance(key, HashPoint)
         return await ContainsAction(key).on(self.protocolized())

+    async def split(self, key: HashPoint[ActiveKeyType]) -> tuple[
+        'ActiveBinaryTree[ActiveKeyType, MetaDataType]',
+        'ActiveBinaryTree[ActiveKeyType, MetaDataType]',
+    ]:
+        return await SplitAction(key).on(self.protocolized())
+
+    async def union(
+        self, other: 'ActiveBinaryTree[ActiveKeyType, MetaDataType]'
+    ) -> 'ActiveBinaryTree[ActiveKeyType, MetaDataType]':
+        return await UnionAction(other.protocolized()).on(self.protocolized())
+
     def loose(self) -> CollectionInterface[
         BinaryTree[KeyMetadata[ActiveKeyType, MetaDataType]]
     ]:
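Note (not part of the commit): the new split/union methods compose with the existing tree API. A minimal usage sketch; the function name and the pre-built trees are hypothetical, and star-imports from rainbowadn.core and rainbowadn.collection.trees.binary are assumed:

    async def split_then_union(a: ActiveBinaryTree, b: ActiveBinaryTree, key: HashPoint):
        # split `a` at `key`: one tree with keys ordered before `key`, one with the rest
        left, right = await a.split(key)
        # union recursively splits `b` at each key of `a` and reassembles a single tree
        merged = await a.union(b)
        return left, right, merged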
@@ -2,7 +2,7 @@ __all__ = (
 'assert_true', 'assert_trues', 'assert_false', 'assert_none', 'assert_none_strict', 'assert_eq',
 'ExtendableResolver',
 'gather', 'asum', 'alist', 'set_gather_asyncio', 'set_gather_linear',
-'hash_point_format', 'tabulate',
+'hash_point_format', 'tabulate', 'enable_newline', 'disable_newline',
 'HashPoint',
 'HashResolver',
 'LocalMetaOrigin',
@@ -21,7 +21,7 @@ from .asserts import assert_eq, assert_false, assert_none, assert_none_strict, a
 from .extendableresolver import ExtendableResolver
 from .gather import alist, asum, gather, set_gather_asyncio, set_gather_linear
 from .hashpoint import HashPoint
-from .hashpointformat import hash_point_format, tabulate
+from .hashpointformat import disable_newline, enable_newline, hash_point_format, tabulate
 from .hashresolver import HashResolver
 from .localmetaorigin import LocalMetaOrigin
 from .localorigin import LocalOrigin
@@ -1,5 +1,5 @@
 import asyncio
-from typing import AsyncIterable, TypeVar
+from typing import Any, AsyncIterable, Awaitable, Coroutine, TypeVar, overload

 __all__ = ('gather', 'asum', 'alist', 'set_gather_asyncio', 'set_gather_linear',)

@@ -20,6 +20,51 @@ def set_gather_linear():
     _gather = _local_gather


+T0 = TypeVar('T0')
+T1 = TypeVar('T1')
+T2 = TypeVar('T2')
+T3 = TypeVar('T3')
+T4 = TypeVar('T4')
+
+
+@overload
+def gather(
+    a0: Coroutine[Any, Any, T0],
+    a1: Coroutine[Any, Any, T1],
+) -> Awaitable[tuple[T0, T1]]: ...
+
+
+@overload
+def gather(
+    a0: Coroutine[Any, Any, T0],
+    a1: Coroutine[Any, Any, T1],
+    a2: Coroutine[Any, Any, T2],
+) -> Awaitable[tuple[T0, T1, T2]]: ...
+
+
+@overload
+def gather(
+    a0: Coroutine[Any, Any, T0],
+    a1: Coroutine[Any, Any, T1],
+    a2: Coroutine[Any, Any, T2],
+    a3: Coroutine[Any, Any, T3],
+) -> Awaitable[tuple[T0, T1, T2, T3]]: ...
+
+
+@overload
+def gather(
+    a0: Coroutine[Any, Any, T0],
+    a1: Coroutine[Any, Any, T1],
+    a2: Coroutine[Any, Any, T2],
+    a3: Coroutine[Any, Any, T3],
+    a4: Coroutine[Any, Any, T4],
+) -> Awaitable[tuple[T0, T1, T2, T3, T4]]: ...
+
+
+@overload
+def gather(*args): ...
+
+
 def gather(*args):
     return _gather(*args)

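Note (illustrative, not part of the diff): the @overload stubs only refine static typing; at runtime every call still goes through _gather. With them, a call site like the hypothetical sketch below is inferred as a precise tuple type (FlowCheque/FlowBank are types introduced elsewhere in this commit):

    async def resolve_both(x: HashPoint[FlowCheque], y: HashPoint[FlowBank]) -> tuple[FlowCheque, FlowBank]:
        # the two-argument overload makes this infer as tuple[FlowCheque, FlowBank]
        cheque, bank = await gather(x.resolve(), y.resolve())
        return cheque, bank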
@@ -2,7 +2,7 @@ from .hashpoint import *
 from .mentionable import *
 from .recursivementionable import *

-__all__ = ('hash_point_format', 'tabulate',)
+__all__ = ('hash_point_format', 'tabulate', 'enable_newline', 'disable_newline',)


 async def hash_point_format(hash_point: HashPoint, tab: int) -> str:
@@ -19,6 +19,16 @@ async def hash_point_format(hash_point: HashPoint, tab: int) -> str:
 newline = False


+def enable_newline():
+    global newline
+    newline = True
+
+
+def disable_newline():
+    global newline
+    newline = False
+
+
 def tabulate(tab: int) -> str:
     assert isinstance(tab, int)
     if newline:
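Note (illustrative): enable_newline/disable_newline toggle the module-level `newline` flag that tabulate reads, so the switch is global for all formatting. A hypothetical call pattern (function name is an assumption):

    async def dump(point: HashPoint) -> str:
        enable_newline()      # tabulate() now emits line breaks and indentation
        formatted = await hash_point_format(point, 0)
        disable_newline()     # back to the compact single-line form
        return formatted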
@@ -1,5 +1,7 @@
 __all__ = (
+    'ConstMapper',
     'UnitReducer',
 )

+from ._constmapper import ConstMapper
 from ._unitreducer import UnitReducer
@@ -1,3 +1,13 @@
-__all__ = ('FlowStandard',)
+__all__ = (
+    'BankBlock',
+    'FlowCheque',
+    'FlowIterate',
+    'FlowStandard',
+    'FlowCoinData', 'FlowCoin', 'FlowTransactionData', 'FlowTransaction',
+)

+from ._bankblock import BankBlock
+from ._flowcheque import FlowCheque
+from ._flowiterate import FlowIterate
 from ._flowstandard import FlowStandard
+from ._flowtransaction import FlowCoin, FlowCoinData, FlowTransaction, FlowTransactionData
rainbowadn/flow13/_bankblock.py (new file, 63 lines)
@@ -0,0 +1,63 @@
from typing import TypeAlias

from rainbowadn.collection.pair import *
from rainbowadn.core import *
from rainbowadn.flow.verification.core import *
from rainbowadn.nullability import *
from ._bankflow import *
from ._flowbank import *
from ._flowblock import *
from ._flowcheque import *
from ._flowstandard import *

__all__ = ('BankBlock',)

Index: TypeAlias = FlowStandard[FlowBlock[Pair[FlowCheque, FlowBank]]]


class BankBlock:
    def __init__(self, reference: NullableReference[FlowBlock[Pair[FlowCheque, FlowBank]]]):
        assert isinstance(reference, NullableReference)
        self.reference = reference

    @classmethod
    def flow(cls) -> 'BankFlow':
        return BankFlow(FlowBank.empty())

    @classmethod
    def link_factory(cls) -> RainbowFactory[Pair[FlowCheque, FlowBank]]:
        return PairFactory(FlowCheque.factory(), FlowBank.factory()).loose()

    @classmethod
    def empty(cls) -> 'BankBlock':
        return cls(
            NullableReference(
                Null(),
                FlowBlockFactory(cls.link_factory()).loose()
            )
        )

    @classmethod
    def verification(cls) -> Verification[Index]:
        return FlowBlockVerification(cls.flow().link_verification()).loose()

    async def verify(self) -> bool:
        assert_true(await self.verification().verify(await FlowBlock.outer_of(self.link_factory(), self.reference)))
        return True

    async def add(self, cheque: FlowCheque) -> 'BankBlock':
        assert isinstance(cheque, FlowCheque)
        previous_link: Nullable[Pair[FlowCheque, FlowBank]] = await FlowBlock.link_of(self.reference)
        assert isinstance(previous_link, Nullable)
        previous_bank: Nullable[FlowBank]
        if previous_link.null():
            previous_bank = Null()
        else:
            previous_bank = NotNull(await (previous_link.resolve()).element1.resolve())
        assert isinstance(previous_link, Nullable)
        bank: FlowBank = await self.flow().add(previous_bank, cheque)
        assert isinstance(bank, FlowBank)
        link: Pair[FlowCheque, FlowBank] = Pair(HashPoint.of(cheque), HashPoint.of(bank))
        block: FlowBlock[Pair[FlowCheque, FlowBank]] = await FlowBlock.add_to(self.reference, HashPoint.of(link))
        assert isinstance(block, FlowBlock)
        return BankBlock(NullableReference.off(block))
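Note (sketch only, not in the commit): BankBlock chains (FlowCheque, FlowBank) pairs, recomputing the bank via BankFlow.add at every step. A rough usage sketch; the driver function and the pre-built cheques are assumptions:

    async def build_chain(cheques: list[FlowCheque]) -> BankBlock:
        block = BankBlock.empty()
        for cheque in cheques:
            block = await block.add(cheque)   # appends one (cheque, bank) link
        assert_true(await block.verify())     # replays FlowBlockVerification over the whole chain
        return block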
@@ -1,17 +1,21 @@
+from rainbowadn.collection.pair import *
 from rainbowadn.core import *
 from rainbowadn.flow.bridge import *
+from rainbowadn.flow.core import *
 from rainbowadn.flow.primitive import *
 from rainbowadn.flow.verification.core import *
+from rainbowadn.flow.verification.stateverification import *
 from rainbowadn.nullability import *
 from ._flowbank import *
 from ._flowcheque import *
 from ._flowstandard import *
 from ._flowtransaction import *
+from ._flowunion import *

-__all__ = ('FlowBankVerification',)
+__all__ = ('BankFlow',)


-class FlowBankVerification(
+class BankFlow(
     Verification[tuple[Nullable[FlowBank], FlowCheque, FlowBank]],
 ):
     def __init__(self, initial: FlowBank):
@@ -56,36 +60,28 @@ class FlowBankVerification(
         assert isinstance(bank, FlowBank)

         async def verify_unique_minted():
-            previous_minted: FlowStandard[FlowCoin] = await previous.minted()
-            cheque_minted: FlowStandard[FlowCoin] = await cheque.minted()
-            bank_minted: FlowStandard[FlowCoin] = await bank.minted()
             assert_true(
                 await cls._verify_disjoint_union(
-                    previous_minted,
-                    cheque_minted,
-                    bank_minted,
+                    previous.minted,
+                    cheque.minted,
+                    bank.minted,
                 )
             )
             return True

         async def verify_unique_used():
-            previous_used: FlowStandard[FlowCoin] = await previous.used()
-            cheque_used: FlowStandard[FlowCoin] = await cheque.used()
-            bank_used: FlowStandard[FlowCoin] = await bank.used()
             assert_true(
                 await cls._verify_disjoint_union(
-                    previous_used,
-                    cheque_used,
-                    bank_used,
+                    previous.used,
+                    cheque.used,
+                    bank.used,
                 )
             )
             return True

         async def verify_used_were_minted():
-            bank_minted: FlowStandard[FlowCoin] = await bank.minted()
-            cheque_used: FlowStandard[FlowCoin] = await cheque.used()
             assert_true(
-                await cheque_used.verify_subset(UnitReducer(bank_minted))
+                await cheque.used.verify_subset(UnitReducer(bank.minted))
             )
             return True

@@ -119,3 +115,40 @@ class FlowBankVerification(
         assert isinstance(previous_bank, FlowBank)
         assert_true(await self._verify(previous_bank, cheque, bank))
         return True
+
+    async def add(self, previous: Nullable[FlowBank], cheque: FlowCheque) -> FlowBank:
+        assert isinstance(previous, Nullable)
+        assert isinstance(cheque, FlowCheque)
+        previous_bank: FlowBank
+        if previous.null():
+            previous_bank = self.initial
+        else:
+            previous_bank = previous.resolve()
+        assert isinstance(previous_bank, FlowBank)
+        minted: FlowStandard[FlowCoin]
+        used: FlowStandard[FlowCoin]
+        minted, used = await gather(
+            flow_union(previous_bank.minted, cheque.minted),
+            flow_union(previous_bank.used, cheque.used),
+        )
+        return FlowBank(minted, used)
+
+    def link_verification(self) -> Verification[
+        tuple[Nullable[Pair[FlowCheque, FlowBank]], Pair[FlowCheque, FlowBank]]
+    ]:
+        class Decomposition(Mapper[Pair[FlowCheque, FlowBank], tuple[FlowCheque, FlowBank]]):
+            async def map(self, element: Pair[FlowCheque, FlowBank]) -> tuple[FlowCheque, FlowBank]:
+                cheque: FlowCheque
+                bank: FlowBank
+                cheque, bank = await gather(
+                    element.element0.resolve(),
+                    element.element1.resolve(),
+                )
+                assert isinstance(cheque, FlowCheque)
+                assert isinstance(bank, FlowBank)
+                return cheque, bank
+
+        return StateVerification(
+            Decomposition(),
+            self
+        )
@@ -1,12 +1,51 @@
+from typing import Iterable
+
+from rainbowadn.core import *
 from ._flowstandard import *
 from ._flowtransaction import *

 __all__ = ('FlowBank',)


-class FlowBank:
-    async def minted(self) -> FlowStandard[FlowCoin]:
-        raise NotImplementedError
+class FlowBank(StaticMentionable, RecursiveMentionable):
+    @classmethod
+    def from_bytes(cls, source: bytes, resolver: HashResolver) -> 'FlowBank':
+        return FlowBank(
+            FlowStandardFactory.of(FlowCoin.factory()).from_bytes(
+                source[:HashPoint.HASH_LENGTH], resolver
+            ),
+            FlowStandardFactory.of(FlowCoin.factory()).from_bytes(
+                source[HashPoint.HASH_LENGTH:], resolver
+            ),
+        )

-    async def used(self) -> FlowStandard[FlowCoin]:
-        raise NotImplementedError
+    def points(self) -> Iterable[HashPoint]:
+        return [*self.minted.points(), *self.used.points()]
+
+    def __bytes__(self):
+        return bytes(self.minted) + bytes(self.used)
+
+    def __init__(
+        self,
+        minted: FlowStandard[FlowCoin],
+        used: FlowStandard[FlowCoin],
+    ):
+        assert isinstance(minted, FlowStandard)
+        assert isinstance(used, FlowStandard)
+        self.minted = minted
+        self.used = used
+
+    @classmethod
+    def empty(cls) -> 'FlowBank':
+        return FlowBank(
+            FlowStandardFactory.empty(FlowCoin.factory()),
+            FlowStandardFactory.empty(FlowCoin.factory()),
+        )
+
+    async def str(self, tab: int) -> str:
+        assert isinstance(tab, int)
+        return f'(' \
+               f'{tabulate(tab + 1)}bank' \
+               f'{tabulate(tab + 1)}(minted)' \
+               f'{tabulate(tab + 1)}(used)' \
+               f'{tabulate(tab)})'
rainbowadn/flow13/_flowblock.py (new file, 182 lines)
@@ -0,0 +1,182 @@
from typing import Generic, Iterable, TypeAlias, TypeVar

from rainbowadn.core import *
from rainbowadn.flow.verification.core import *
from rainbowadn.nullability import *
from ._flowstandard import *

__all__ = ('FlowBlock', 'FlowBlockFactory', 'FlowBlockVerification',)

LinkT = TypeVar('LinkT')

FBL: TypeAlias = 'FlowBlock[LinkT]'
Index: TypeAlias = FlowStandard[FBL]


class FlowBlock(Generic[LinkT], RecursiveMentionable):
    def points(self) -> Iterable[HashPoint]:
        return [*self.previous.points(), *self.index.points(), self.link]

    def __bytes__(self):
        return bytes(self.previous) + bytes(self.index) + bytes(self.link)

    def __factory__(self) -> RainbowFactory[FBL]:
        return FlowBlockFactory(self.link.factory)

    def __init__(
        self,
        previous: NullableReference[FBL],
        index: Index,
        link: HashPoint[LinkT],
    ):
        assert isinstance(previous, NullableReference)
        assert isinstance(index, FlowStandard)
        self.previous = previous
        self.index = index
        self.link = link

    async def outer(self) -> Index:
        return FlowStandard(
            (
                await self.index.protocolized.tree.add(
                    HashPoint.of(self)
                )
            ).protocolized()
        )

    @classmethod
    async def outer_of(cls, factory: RainbowFactory[LinkT], reference: NullableReference[FBL]) -> Index:
        if reference.null():
            return FlowStandardFactory.empty(FlowBlockFactory(factory))
        else:
            return await (await reference.resolve()).outer()

    @classmethod
    async def link_of(cls, reference: NullableReference[FBL]) -> Nullable[LinkT]:
        if reference.null():
            return Null()
        else:
            return NotNull(await (await reference.resolve()).link.resolve())

    async def add(self, link: HashPoint[LinkT]) -> FBL:
        return FlowBlock(
            NullableReference.off(self),
            await self.outer(),
            link
        )

    @classmethod
    async def add_to(cls, reference: NullableReference[FBL], link: HashPoint[LinkT]) -> FBL:
        return FlowBlock(
            reference,
            await cls.outer_of(link.factory, reference),
            link
        )

    async def str(self, tab: int) -> str:
        assert isinstance(tab, int)
        previous_str: str
        link_str: str
        previous_str, link_str = await gather(
            self.previous.str(tab),
            hash_point_format(self.link, tab)
        )
        assert isinstance(previous_str, str)
        assert isinstance(link_str, str)
        return f'{previous_str}' \
               f'{tabulate(tab)}(index)' \
               f'{tabulate(tab)}{link_str}'


class FlowBlockFactory(RainbowFactory[FBL], Generic[LinkT]):
    def __init__(self, factory: RainbowFactory[LinkT]):
        assert isinstance(factory, RainbowFactory)
        self.factory = factory

    def from_bytes(self, source: bytes, resolver: HashResolver) -> FBL:
        assert isinstance(source, bytes)
        assert isinstance(resolver, HashResolver)
        return FlowBlock(
            NullableReferenceFactory(self).from_bytes(source[:HashPoint.HASH_LENGTH], resolver),
            FlowStandardFactory.of(self).from_bytes(source[HashPoint.HASH_LENGTH:2 * HashPoint.HASH_LENGTH], resolver),
            ResolverOrigin(self.factory, source[2 * HashPoint.HASH_LENGTH:], resolver).hash_point(),
        )

    def loose(self) -> RainbowFactory[FBL]:
        return self


class FlowBlockIndexedVerification(
    Verification[FBL],
    Generic[LinkT]
):
    def __init__(
        self,
        index: Index,
        verification: Verification[tuple[Nullable[LinkT], LinkT]],
    ):
        assert isinstance(index, FlowStandard)
        assert isinstance(verification, Verification)
        self.index = index
        self.verification = verification

    async def verify(self, element: FBL) -> bool:
        link = await element.link.resolve()
        if element.previous.null():
            assert_trues(
                await gather(
                    self.verification.verify((Null(), link)),
                    element.index.verify_empty(),
                )
            )
        else:
            previous: FBL = await element.previous.resolve()

            async def verify_link() -> bool:
                assert_true(await self.verification.verify((NotNull(await previous.link.resolve()), link)))
                return True

            async def verify_contains() -> bool:
                assert_true(await self.index.contains(previous))
                return True

            async def verify_index() -> bool:
                assert_eq(element.index, await previous.outer())
                return True

            assert_trues(
                await gather(
                    verify_link(),
                    verify_contains(),
                    verify_index(),
                )
            )
        return True

    def loose(self) -> Verification[FBL]:
        return self


class FlowBlockVerification(
    Verification[Index],
    Generic[LinkT]
):
    def __init__(
        self,
        verification: Verification[tuple[Nullable[LinkT], LinkT]],
    ):
        assert isinstance(verification, Verification)
        self.verification = verification

    async def verify(self, element: Index) -> bool:
        assert_true(
            await ReduceVerification(
                FlowBlockIndexedVerification(element, self.verification)
            ).verify(
                await element.reducer()
            )
        )
        return True

    def loose(self) -> Verification[Index]:
        return self
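Note (hypothetical sketch, not in the diff): besides the previous reference and the link, every FlowBlock carries an index of all earlier blocks, maintained by outer()/outer_of(); add_to() is the intended way to extend a chain held behind a NullableReference. The helper below is an assumption, built only from calls defined in this commit:

    async def chain(factory: RainbowFactory, links: list[HashPoint]) -> NullableReference:
        reference = NullableReference(Null(), FlowBlockFactory(factory).loose())   # empty chain
        for link in links:
            block = await FlowBlock.add_to(reference, link)   # new block's index = outer() of the previous head
            reference = NullableReference.off(block)
        return reference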
@@ -1,7 +1,12 @@
+import itertools
+from typing import Iterable
+
 from rainbowadn.collection.keyvalue import *
 from rainbowadn.core import *
 from rainbowadn.flow.core import *
 from rainbowadn.flow.verification.core import *
+from rainbowadn.v13 import MINT_CONST
+from ._flowiterate import *
 from ._flowstandard import *
 from ._flowtransaction import *

@@ -19,21 +24,49 @@ class ValueMapper(Mapper[FlowCoin, int]):
         return await element.int_value()


-class FlowCheque:
-    async def transactions(self) -> FlowStandard[FlowTransaction]:
-        raise NotImplementedError
+class FlowCheque(StaticMentionable, RecursiveMentionable):
+    @classmethod
+    def from_bytes(cls, source: bytes, resolver: HashResolver) -> 'FlowCheque':
+        return FlowCheque(
+            FlowStandardFactory.of(FlowTransaction.factory()).from_bytes(
+                source[:HashPoint.HASH_LENGTH], resolver
+            ),
+            FlowStandardFactory.of(FlowCoin.factory()).from_bytes(
+                source[HashPoint.HASH_LENGTH:2 * HashPoint.HASH_LENGTH], resolver
+            ),
+            FlowStandardFactory.of(FlowCoin.factory()).from_bytes(
+                source[2 * HashPoint.HASH_LENGTH:3 * HashPoint.HASH_LENGTH], resolver
+            ),
+            FlowStandardFactory.of(KeyValueFactory(FlowCoin.factory(), FlowTransaction.factory()).loose()).from_bytes(
+                source[3 * HashPoint.HASH_LENGTH:], resolver
+            ),
+        )

-    async def minted(self) -> FlowStandard[FlowCoin]:
-        raise NotImplementedError
+    def points(self) -> Iterable[HashPoint]:
+        return [*self.transactions.points(), *self.minted.points(), *self.used.points(), *self.usedx.points()]

-    async def used(self) -> FlowStandard[FlowCoin]:
-        raise NotImplementedError
+    def __bytes__(self):
+        return bytes(self.transactions) + bytes(self.minted) + bytes(self.used) + bytes(self.usedx)

-    async def usedx(self) -> FlowStandard[KeyValue[FlowCoin, FlowTransaction]]:
-        raise NotImplementedError
+    def __init__(
+        self,
+        transactions: FlowStandard[FlowTransaction],
+        minted: FlowStandard[FlowCoin],
+        used: FlowStandard[FlowCoin],
+        usedx: FlowStandard[KeyValue[FlowCoin, FlowTransaction]],
+    ):
+        assert isinstance(transactions, FlowStandard)
+        assert isinstance(minted, FlowStandard)
+        assert isinstance(used, FlowStandard)
+        assert isinstance(usedx, FlowStandard)
+        self.transactions = transactions
+        self.minted = minted
+        self.used = used
+        self.usedx = usedx

-    async def mint(self) -> int:
-        raise NotImplementedError
+    @classmethod
+    async def mint(cls) -> int:
+        return MINT_CONST

     @classmethod
     async def total_of(cls, tree: FlowStandard[FlowCoin]) -> int:
@@ -45,10 +78,10 @@ class FlowCheque:
         return total

     async def total_minted(self) -> int:
-        return await self.total_of(await self.minted())
+        return await self.total_of(self.minted)

     async def total_used(self) -> int:
-        return await self.total_of(await self.used())
+        return await self.total_of(self.used)

     async def extra(self) -> int:
         mint: int
@@ -70,25 +103,25 @@ class FlowCheque:

     async def _verify_transactions(self) -> bool:
         assert_true(
-            await (await self.transactions()).verify(TransactionVerification(self).loose())
+            await self.transactions.verify(TransactionVerification(self).loose())
         )
         return True

     async def _verify_minted(self) -> bool:
         assert_true(
-            await (await self.minted()).verify(MintedVerification(self).loose())
+            await self.minted.verify(MintedVerification(self).loose())
         )
         return True

     async def _verify_used(self) -> bool:
         assert_true(
-            await (await self.used()).verify(UsedVerification(self).loose())
+            await self.used.verify(UsedVerification(self).loose())
         )
         return True

     async def _verify_usedx(self) -> bool:
         assert_true(
-            await (await self.usedx()).verify(UsedXVerification(self).loose())
+            await self.usedx.verify(UsedXVerification(self).loose())
         )
         return True

@@ -104,6 +137,87 @@ class FlowCheque:
         )
         return True

+    @classmethod
+    async def _transaction_minted(cls, transaction: FlowTransaction) -> Iterable[FlowCoin]:
+        assert isinstance(transaction, FlowTransaction)
+        return await (await transaction.minted_reducer()).reduce(FlowIterate([]))
+
+    @classmethod
+    async def _transaction_used(cls, transaction: FlowTransaction) -> Iterable[FlowCoin]:
+        assert isinstance(transaction, FlowTransaction)
+        return await (await transaction.used_reducer()).reduce(FlowIterate([]))
+
+    @classmethod
+    async def _transaction_usedx(cls, transaction: FlowTransaction) -> Iterable[KeyValue[FlowCoin, FlowTransaction]]:
+        assert isinstance(transaction, FlowTransaction)
+        return (
+            KeyValue(HashPoint.of(coin), HashPoint.of(transaction))
+            for
+            coin
+            in
+            await cls._transaction_used(transaction)
+        )
+
+    @classmethod
+    async def _make_minted(cls, transactions: Iterable[FlowTransaction]) -> FlowStandard[FlowCoin]:
+        return await FlowStandardFactory.off(
+            FlowCoin.factory(), itertools.chain(
+                *(await gather(*(cls._transaction_minted(transaction) for transaction in transactions)))
+            )
+        )
+
+    @classmethod
+    async def _make_used(cls, transactions: Iterable[FlowTransaction]) -> FlowStandard[FlowCoin]:
+        return await FlowStandardFactory.off(
+            FlowCoin.factory(), itertools.chain(
+                *(await gather(*(cls._transaction_used(transaction) for transaction in transactions)))
+            )
+        )
+
+    @classmethod
+    async def _make_usedx(
+        cls,
+        transactions: Iterable[FlowTransaction]
+    ) -> FlowStandard[KeyValue[FlowCoin, FlowTransaction]]:
+        return await FlowStandardFactory.off(
+            KeyValueFactory(FlowCoin.factory(), FlowTransaction.factory()).loose(), itertools.chain(
+                *(await gather(*(cls._transaction_usedx(transaction) for transaction in transactions)))
+            )
+        )
+
+    @classmethod
+    async def make(cls, transactions: Iterable[FlowTransaction]) -> 'FlowCheque':
+        transactions_standard: FlowStandard[FlowTransaction]
+        minted: FlowStandard[FlowCoin]
+        used: FlowStandard[FlowCoin]
+        usedx: FlowStandard[KeyValue[FlowCoin, FlowTransaction]]
+        transactions_standard, minted, used, usedx = await gather(
+            FlowStandardFactory.off(FlowTransaction.factory(), transactions),
+            cls._make_minted(transactions),
+            cls._make_used(transactions),
+            cls._make_usedx(transactions),
+        )
+        assert isinstance(transactions_standard, FlowStandard)
+        assert isinstance(minted, FlowStandard)
+        assert isinstance(used, FlowStandard)
+        assert isinstance(usedx, FlowStandard)
+        return cls(
+            transactions_standard,
+            minted,
+            used,
+            usedx,
+        )
+
+    async def str(self, tab: int) -> str:
+        assert isinstance(tab, int)
+        return f'(' \
+               f'{tabulate(tab + 1)}cheque' \
+               f'{tabulate(tab + 1)}{await self.transactions.str(tab + 1)}' \
+               f'{tabulate(tab + 1)}(minted)' \
+               f'{tabulate(tab + 1)}(used)' \
+               f'{tabulate(tab + 1)}(usedx)' \
+               f'{tabulate(tab)})'
+

 class TransactionVerification(
     Verification[FlowTransaction]
@@ -133,50 +247,27 @@ class TransactionVerification(

     async def _verify_transaction_minted(self, transaction: FlowTransaction) -> bool:
         assert isinstance(transaction, FlowTransaction)
-        minted: FlowStandard[FlowCoin]
-        minted_reducer: Reducer[FlowCoin, bool]
-        minted, minted_reducer = await gather(
-            self.cheque.minted(),
-            transaction.minted_reducer(),
-        )
-        assert isinstance(minted, FlowStandard)
-        assert isinstance(minted_reducer, Reducer)
         assert_true(
-            await minted.verify_contains_all(
-                minted_reducer
+            await self.cheque.minted.verify_contains_all(
+                await transaction.minted_reducer()
             )
         )
         return True

     async def _verify_transaction_used(self, transaction: FlowTransaction) -> bool:
         assert isinstance(transaction, FlowTransaction)
-        minted: FlowStandard[FlowCoin]
-        minted_reducer: Reducer[FlowCoin, bool]
-        used, used_reducer = await gather(
-            self.cheque.used(),
-            transaction.used_reducer(),
-        )
-        assert isinstance(used, FlowStandard)
-        assert isinstance(used_reducer, Reducer)
         assert_true(
-            await used.verify_contains_all(
-                used_reducer
+            await self.cheque.used.verify_contains_all(
+                await transaction.used_reducer()
             )
         )
         return True

     async def _verify_transaction_usedx(self, transaction: FlowTransaction) -> bool:
-        minted: FlowStandard[KeyValue[FlowCoin, FlowTransaction]]
-        minted_reducer: Reducer[FlowCoin, bool]
-        usedx, used_reducer = await gather(
-            self.cheque.usedx(),
-            transaction.used_reducer(),
-        )
-        assert isinstance(usedx, FlowStandard)
-        assert isinstance(used_reducer, Reducer)
+        assert isinstance(transaction, FlowTransaction)
         assert_true(
-            await usedx.verify_contains_all(
-                self.usedx_reducer(used_reducer, transaction)
+            await self.cheque.usedx.verify_contains_all(
+                self.usedx_reducer(await transaction.used_reducer(), transaction)
             )
         )
         return True
@@ -205,17 +296,9 @@ class MintedVerification(

     async def verify(self, element: FlowCoin) -> bool:
         assert isinstance(element, FlowCoin)
-        transactions: FlowStandard[FlowTransaction]
-        transaction: FlowTransaction
-        transactions, transaction = await gather(
-            self.cheque.transactions(),
-            element.transaction.resolve(),
-        )
-        assert isinstance(transactions, FlowStandard)
-        assert isinstance(transaction, FlowTransaction)
         assert_true(
-            await transactions.contains(
-                transaction
+            await self.cheque.transactions.contains(
+                await element.transaction.resolve()
             )
         )
         return True
@@ -234,7 +317,7 @@ class UsedVerification(
     async def verify(self, element: FlowCoin) -> bool:
         assert isinstance(element, FlowCoin)
         assert_true(
-            await (await self.cheque.usedx()).contains(
+            await self.cheque.usedx.contains(
                 KeyValue(HashPoint.of(element), HashPoint.of(FlowTransaction.empty()))
             )
         )
@@ -251,24 +334,33 @@ class UsedXVerification(
         assert isinstance(cheque, FlowCheque)
         self.cheque = cheque

-    async def _verify_transaction(self, transaction: FlowTransaction) -> bool:
+    async def _verify_transaction_registred(self, transaction: FlowTransaction) -> bool:
         assert isinstance(transaction, FlowTransaction)
         assert_true(
-            await (await self.cheque.transactions()).contains(
+            await self.cheque.transactions.contains(
                 transaction
             )
         )
         return True

     @classmethod
-    async def _verify_transaction_contains(cls, transaction: FlowTransaction, coin: HashPoint[FlowCoin]) -> bool:
+    async def _verify_coin_contained_in_transaction(
+        cls, transaction: FlowTransaction, coin: HashPoint[FlowCoin]
+    ) -> bool:
         assert isinstance(transaction, FlowTransaction)
         assert isinstance(coin, HashPoint)
-        in_coin: HashPoint[FlowCoin]
-        async for in_coin in (await transaction.data_resolved()).iter_in_coins():
-            if in_coin == coin:
-                return True
-        raise ValueError('used coin not found in transaction')
+        data_resolved: FlowTransactionData
+        coin_resoled: FlowCoin
+        data_resolved, coin_resoled = await gather(
+            transaction.data_resolved(),
+            coin.resolve(),
+        )
+        assert_true(await data_resolved.in_coins.contains(coin_resoled))
+        return True
+
+    async def _verify_coin_registred_as_used(self, coin: HashPoint[FlowCoin]) -> bool:
+        assert_true(await self.cheque.used.contains(await coin.resolve()))
+        return True

     async def verify(self, element: KeyValue[FlowCoin, FlowTransaction]) -> bool:
         assert isinstance(element, KeyValue)
@@ -276,8 +368,9 @@ class UsedXVerification(
         assert isinstance(transaction, FlowTransaction)
         assert_trues(
             await gather(
-                self._verify_transaction(transaction),
-                self._verify_transaction_contains(transaction, element.key),
+                self._verify_transaction_registred(transaction),
+                self._verify_coin_contained_in_transaction(transaction, element.key),
+                self._verify_coin_registred_as_used(element.key),
             )
         )
         return True
rainbowadn/flow13/_flowiterate.py (new file, 29 lines)
@@ -0,0 +1,29 @@
from typing import Generic, Iterable, TypeVar

from rainbowadn.flow.core import *

__all__ = ('FlowIterate',)

Element = TypeVar('Element')


class FlowIterate(
    Reduce[Element, Iterable[Element]],
    Generic[Element]
):
    async def reduce(self, out: Iterable[Element], element: Element) -> Iterable[Element]:
        def wrap() -> Iterable[Element]:
            yield from out
            yield element

        return wrap()

    async def merge(self, left: Iterable[Element], right: Iterable[Element]) -> Iterable[Element]:
        def wrap() -> Iterable[Element]:
            yield from left
            yield from right

        return wrap()

    def loose(self) -> Reduce[Element, Iterable[Element]]:
        return self
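Note (illustrative): FlowIterate is the Reduce that _flowcheque.py feeds to a reducer to materialise its elements into a plain iterable; reduce appends one element, merge concatenates two partial results. A hypothetical helper built only on calls used in this commit:

    async def collect(reducer: Reducer) -> list:
        coins = await reducer.reduce(FlowIterate([]))   # start from an empty iterable, visit every element
        return list(coins)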
@@ -1,15 +1,19 @@
-from typing import Any, Generic, TypeAlias, TypeVar
+from typing import Any, Generic, Iterable, TypeAlias, TypeVar

 from rainbowadn.atomic import *
+from rainbowadn.collection.comparison import *
+from rainbowadn.collection.keymetadata import *
 from rainbowadn.collection.trees.binary import *
-from rainbowadn.collection.trees.binary.actions import *
 from rainbowadn.collection.trees.binary.core import *
 from rainbowadn.core import *
 from rainbowadn.flow.core import *
+from rainbowadn.flow.primitive import *
+from rainbowadn.flow.verification.core import *
+from rainbowadn.nullability import *
 from ._binaryflow import *
 from ._flowtree import *

-__all__ = ('FlowStandard',)
+__all__ = ('FlowStandard', 'FlowStandardFactory',)

 KeyT = TypeVar('KeyT')
 FS: TypeAlias = 'FlowStandard[KeyT]'
@@ -18,15 +22,27 @@ BP: TypeAlias = 'BinaryProtocolized[KeyT, Integer, ABT]'


 class FlowStandard(
+    RecursiveMentionable,
     FlowTree[KeyT, FS],
     Generic[KeyT]
 ):
+    def points(self) -> Iterable[HashPoint]:
+        return self.protocolized.tree.reference.points()
+
+    def __bytes__(self):
+        return bytes(self.protocolized.tree.reference)
+
+    def __factory__(self) -> RainbowFactory['FlowStandard[KeyT]']:
+        return FlowStandardFactory(
+            self.protocolized.tree.reference.factory
+        )
+
     def __init__(self, protocolized: BP):
         assert isinstance(protocolized, BinaryProtocolized)
         self.protocolized = protocolized

     async def contains(self, key: KeyT) -> bool:
-        return await ContainsAction(key).on(self.protocolized)
+        return await self.protocolized.tree.contains(HashPoint.of(key))

     def _protocolized(self: FS) -> BP:
         return self.protocolized
@@ -41,5 +57,70 @@ class FlowStandard(
         assert_true(await VerifySubsetAction(reducer).on(self.protocolized))
         return True

+    async def verify_empty(self) -> bool:
+        assert_true(
+            await ReduceVerification(
+                MapperVerification(ConstMapper(False))
+            ).verify(
+                await self.reducer()
+            )
+        )
+        return True
+
     async def reducer(self) -> Reducer[KeyT, Any]:
         return BinaryReducer(self.protocolized).loose()
+
+    def __eq__(self, other):
+        if isinstance(other, FlowStandard):
+            return self.protocolized.tree == other.protocolized.tree
+        else:
+            return NotImplemented
+
+    async def str(self, tab: int) -> str:
+        assert isinstance(tab, int)
+        return await self.protocolized.tree.reference.str(tab)
+
+
+class FlowStandardFactory(RainbowFactory[FlowStandard[KeyT]], Generic[KeyT]):
+    def __init__(
+        self,
+        factory: RainbowFactory[BinaryTree[KeyMetadata[KeyT, Integer]]]
+    ):
+        assert isinstance(factory, RainbowFactory)
+        self.factory = factory
+
+    @classmethod
+    def of(cls, factory: RainbowFactory[KeyT]) -> 'FlowStandardFactory[KeyT]':
+        assert isinstance(factory, RainbowFactory)
+        return FlowStandardFactory(
+            BinaryTreeFactory(KeyMetadataFactory(factory, Integer.factory()))
+        )
+
+    @classmethod
+    async def off(cls, factory: RainbowFactory[KeyT], keys: Iterable[KeyT]) -> 'FlowStandard[KeyT]':
+        assert isinstance(factory, RainbowFactory)
+        abt: ActiveBinaryTree[KeyT, Integer] = cls.empty(factory).protocolized.tree
+        for key in keys:
+            abt = await abt.add(HashPoint.of(key))
+        return FlowStandard(abt.protocolized())
+
+    @classmethod
+    def protocol(cls) -> BinaryBalancing[KeyT, Integer, ABT]:
+        return AVL(HashComparator(Fail()))
+
+    @classmethod
+    def empty(cls, factory: RainbowFactory[KeyT]) -> 'FlowStandard[KeyT]':
+        assert isinstance(factory, RainbowFactory)
+        return FlowStandard(
+            ActiveBinaryTree.empty(cls.protocol(), factory).protocolized()
+        )
+
+    def from_bytes(self, source: bytes, resolver: HashResolver) -> FlowStandard[KeyT]:
+        assert isinstance(source, bytes)
+        assert isinstance(resolver, HashResolver)
+        return FlowStandard(
+            ActiveBinaryTree(
+                self.protocol(),
+                NullableReferenceFactory(self.factory).from_bytes(source, resolver)
+            ).protocolized()
+        )
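Note (sketch under assumptions): FlowStandardFactory pins the tree protocol to AVL over hashed keys, so sets built from the same keys resolve to the same tree. A hypothetical round trip using Integer keys from rainbowadn.atomic (the function name is an assumption):

    async def example(values: list[Integer]) -> bool:
        standard = await FlowStandardFactory.off(Integer.factory(), values)   # build a FlowStandard from an iterable
        return await standard.contains(values[0])                             # membership via the underlying tree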
@ -1,14 +1,14 @@
|
|||||||
from typing import Any, AsyncIterable, Iterable
|
from typing import Any, Iterable
|
||||||
|
|
||||||
import nacl.signing
|
from nacl.signing import SigningKey
|
||||||
|
|
||||||
from rainbowadn.atomic import *
|
from rainbowadn.atomic import *
|
||||||
from rainbowadn.collection.linear import *
|
from rainbowadn.collection.keyvalue import *
|
||||||
from rainbowadn.core import *
|
from rainbowadn.core import *
|
||||||
from rainbowadn.flow.bridge import *
|
|
||||||
from rainbowadn.flow.core import *
|
from rainbowadn.flow.core import *
|
||||||
from rainbowadn.nullability import *
|
from rainbowadn.flow.verification.core import *
|
||||||
from rainbowadn.v13 import *
|
from rainbowadn.v13 import Signature, Subject
|
||||||
|
from ._flowstandard import *
|
||||||
|
|
||||||
__all__ = ('FlowCoinData', 'FlowCoin', 'FlowTransactionData', 'FlowTransaction',)
|
__all__ = ('FlowCoinData', 'FlowCoin', 'FlowTransactionData', 'FlowTransaction',)
|
||||||
|
|
||||||
@ -64,24 +64,21 @@ class FlowCoin(RecursiveMentionable, StaticMentionable):
|
|||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
data: HashPoint[FlowCoinData],
|
data: HashPoint[FlowCoinData],
|
||||||
transaction: HashPoint['FlowTransaction'],
|
transaction: HashPoint['FlowTransaction']
|
||||||
index: HashPoint[Integer]
|
|
||||||
):
|
):
|
||||||
assert isinstance(data, HashPoint)
|
assert isinstance(data, HashPoint)
|
||||||
assert isinstance(transaction, HashPoint)
|
assert isinstance(transaction, HashPoint)
|
||||||
assert isinstance(index, HashPoint)
|
|
||||||
self.data = data
|
self.data = data
|
||||||
self.transaction = transaction
|
self.transaction = transaction
|
||||||
self.index = index
|
|
||||||
|
|
||||||
async def data_resolved(self) -> FlowCoinData:
|
async def data_resolved(self) -> FlowCoinData:
|
||||||
return await self.data.resolve()
|
return await self.data.resolve()
|
||||||
|
|
||||||
def points(self) -> Iterable[HashPoint]:
|
def points(self) -> Iterable[HashPoint]:
|
||||||
return [self.data, self.transaction, self.index]
|
return [self.data, self.transaction]
|
||||||
|
|
||||||
def __bytes__(self):
|
def __bytes__(self):
|
||||||
return bytes(self.data) + bytes(self.transaction) + bytes(self.index)
|
return bytes(self.data) + bytes(self.transaction)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_bytes(cls, source: bytes, resolver: HashResolver) -> 'FlowCoin':
|
def from_bytes(cls, source: bytes, resolver: HashResolver) -> 'FlowCoin':
|
||||||
@ -92,22 +89,14 @@ class FlowCoin(RecursiveMentionable, StaticMentionable):
|
|||||||
ResolverOrigin(
|
ResolverOrigin(
|
||||||
FlowTransaction.factory(), source[HashPoint.HASH_LENGTH:2 * HashPoint.HASH_LENGTH], resolver
|
FlowTransaction.factory(), source[HashPoint.HASH_LENGTH:2 * HashPoint.HASH_LENGTH], resolver
|
||||||
).hash_point(),
|
).hash_point(),
|
||||||
ResolverOrigin(Integer.factory(), source[2 * HashPoint.HASH_LENGTH:], resolver).hash_point(),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
async def str(self, tab: int) -> str:
|
async def str(self, tab: int) -> str:
|
||||||
assert isinstance(tab, int)
|
assert isinstance(tab, int)
|
||||||
data_str, index_str = await gather(
|
|
||||||
hash_point_format(self.data, tab + 1),
|
|
||||||
hash_point_format(self.index, tab + 1),
|
|
||||||
)
|
|
||||||
assert isinstance(data_str, str)
|
|
||||||
assert isinstance(index_str, str)
|
|
||||||
return f'(' \
|
return f'(' \
|
||||||
f'{tabulate(tab + 1)}coin' \
|
f'{tabulate(tab + 1)}coin' \
|
||||||
f'{tabulate(tab + 1)}{data_str}' \
|
f'{tabulate(tab + 1)}{await hash_point_format(self.data, tab + 1)}' \
|
||||||
f'{tabulate(tab + 1)}(origin)' \
|
f'{tabulate(tab + 1)}(origin)' \
|
||||||
f'{tabulate(tab + 1)}{index_str}' \
|
|
||||||
f'{tabulate(tab)})'
|
f'{tabulate(tab)})'
|
||||||
|
|
||||||
async def int_value(self) -> int:
|
async def int_value(self) -> int:
|
||||||
@ -120,11 +109,11 @@ class FlowCoin(RecursiveMentionable, StaticMentionable):
 class FlowTransactionData(RecursiveMentionable, StaticMentionable):
     def __init__(
             self,
-            in_coins: NullableReference[Stack[FlowCoin]],
-            out_coins: NullableReference[Stack[FlowCoinData]],
+            in_coins: FlowStandard[FlowCoin],
+            out_coins: FlowStandard[FlowCoinData],
     ):
-        assert isinstance(in_coins, NullableReference)
-        assert isinstance(out_coins, NullableReference)
+        assert isinstance(in_coins, FlowStandard)
+        assert isinstance(out_coins, FlowStandard)
         self.in_coins = in_coins
         self.out_coins = out_coins
         self.hash_point = HashPoint.of(self)
@ -140,13 +129,10 @@ class FlowTransactionData(RecursiveMentionable, StaticMentionable):
     def from_bytes(cls, source: bytes, resolver: HashResolver) -> 'FlowTransactionData':
         assert isinstance(source, bytes)
         assert isinstance(resolver, HashResolver)

         return cls(
-            NullableReferenceFactory(
-                StackFactory(FlowCoin.factory()).loose()
-            ).from_bytes(source[:HashPoint.HASH_LENGTH], resolver),
-            NullableReferenceFactory(
-                StackFactory(FlowCoinData.factory()).loose()
-            ).from_bytes(source[HashPoint.HASH_LENGTH:], resolver),
+            FlowStandardFactory.of(FlowCoin.factory()).from_bytes(source[:HashPoint.HASH_LENGTH], resolver),
+            FlowStandardFactory.of(FlowCoinData.factory()).from_bytes(source[HashPoint.HASH_LENGTH:], resolver),
         )

     async def _signature_verify(self, coin: FlowCoin, signature: Signature) -> bool:
@ -160,48 +146,69 @@ class FlowTransactionData(RecursiveMentionable, StaticMentionable):
         )
         return True

-    async def _verify_signatures(
-            self,
-            signatures: NullableReference[Stack[Signature]]
-    ) -> bool:
-        assert isinstance(signatures, NullableReference)
-        assert_trues(
-            await gather(
-                *[
-                    self._signature_verify(coin, signature)
-                    for
-                    coin, signature
-                    in
-                    zip(
-                        await self.in_coins_resolved(),
-                        await Stack.list(signatures),
-                        strict=True
-                    )
-                ]
-            )
-        )
-        return True
-
-    def iter_in_coins(self) -> AsyncIterable[HashPoint[FlowCoin]]:
-        return Stack.iter(self.in_coins)
-
-    async def coins(self) -> list[FlowCoin]:
-        return [await x.resolve() async for x in self.iter_in_coins()]
-
-    async def in_coins_resolved(self) -> list[FlowCoin]:
-        return await Stack.list(self.in_coins)
-
-    def iter_out_coins(self) -> AsyncIterable[HashPoint[FlowCoinData]]:
-        return Stack.iter(self.out_coins)
-
-    async def out_coins_resolved(self) -> list[FlowCoinData]:
-        return await Stack.list(self.out_coins)
+    @classmethod
+    def _coin_verification_mapper(
+            cls,
+            signatures: FlowStandard[KeyValue[Subject, Signature]],
+    ) -> Mapper[
+        FlowCoin,
+        bool
+    ]:
+        assert isinstance(signatures, FlowStandard)
+        return CVMapper(signatures)
+
+    def _signature_pair_verification_mapper(self) -> Mapper[
+        KeyValue[Subject, Signature],
+        bool
+    ]:
+        return SPVMapper(self.hash_point)
+
+    async def _verify_coin_signatures(
+            self,
+            signatures: FlowStandard[KeyValue[Subject, Signature]],
+    ) -> bool:
+        assert isinstance(signatures, FlowStandard)
+        assert_true(
+            await ReduceVerification(
+                MapperVerification(self._coin_verification_mapper(signatures))
+            ).loose().verify(
+                await self.in_coins.reducer()
+            )
+        )
+        return True
+
+    async def _verify_signature_pairs(
+            self,
+            signatures: FlowStandard[KeyValue[Subject, Signature]],
+    ):
+        assert isinstance(signatures, FlowStandard)
+        assert_true(
+            await ReduceVerification(
+                MapperVerification(self._signature_pair_verification_mapper())
+            ).loose().verify(
+                await signatures.reducer()
+            )
+        )
+        return True
+
+    async def _verify_signatures(
+            self,
+            signatures: FlowStandard[KeyValue[Subject, Signature]],
+    ) -> bool:
+        assert isinstance(signatures, FlowStandard)
+        assert_trues(
+            await gather(
+                self._verify_coin_signatures(signatures),
+                self._verify_signature_pairs(signatures),
+            )
+        )
+        return True

     async def verify(
             self,
-            signatures: NullableReference[Stack[Signature]]
+            signatures: FlowStandard[KeyValue[Subject, Signature]]
     ) -> bool:
-        assert isinstance(signatures, NullableReference)
+        assert isinstance(signatures, FlowStandard)
         assert_true(await self._verify_signatures(signatures))
         return True

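
Both checks above follow the same shape: map every element of a FlowStandard to a boolean verification, then reduce the results to a single outcome. A framework-free sketch of that idea, with purely illustrative names (verify_all stands in for ReduceVerification wrapping a MapperVerification):

import asyncio

async def verify_all(elements, check):
    # conceptual stand-in for ReduceVerification(MapperVerification(...)).loose().verify(reducer)
    results = await asyncio.gather(*(check(element) for element in elements))
    return all(results)

async def demo():
    pairs = [('alice', 'sig-a'), ('bob', 'sig-b')]
    print(await verify_all(pairs, lambda pair: asyncio.sleep(0, result=bool(pair[1]))))

asyncio.run(demo())
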
@ -218,15 +225,56 @@ class FlowTransactionData(RecursiveMentionable, StaticMentionable):
                f'{tabulate(tab)}(out)' \
                f'{tabulate(tab)}{out_str}'

+    @classmethod
+    def empty(cls) -> 'FlowTransactionData':
+        return cls(
+            FlowStandardFactory.empty(FlowCoin.factory()),
+            FlowStandardFactory.empty(FlowCoinData.factory()),
+        )
+
+
+class CVMapper(Mapper[FlowCoin, bool]):
+    def __init__(self, signatures: FlowStandard[KeyValue[Subject, Signature]]):
+        assert isinstance(signatures, FlowStandard)
+        self.signatures = signatures
+
+    async def map(self, element: FlowCoin) -> bool:
+        assert isinstance(element, FlowCoin)
+        assert_true(
+            await self.signatures.contains(
+                KeyValue((await element.data.resolve()).owner, HashPoint.of(Signature.empty()))
+            )
+        )
+        return True
+
+
+class SPVMapper(Mapper[KeyValue[Subject, Signature], bool]):
+    def __init__(self, hashpoint: HashPoint):
+        assert isinstance(hashpoint, HashPoint)
+        self.hashpoint = hashpoint
+
+    async def map(self, element: KeyValue[Subject, Signature]) -> bool:
+        assert isinstance(element, KeyValue)
+        subject: Subject
+        signature: Signature
+        subject, signature = await gather(
+            element.key,
+            element.value,
+        )
+        assert isinstance(subject, Subject)
+        assert isinstance(signature, Signature)
+        assert_true(signature.verify(subject, self.hashpoint))
+        return True
+
+
 class FlowTransaction(RecursiveMentionable, StaticMentionable):
     def __init__(
             self,
             data: HashPoint[FlowTransactionData],
-            signatures: NullableReference[Stack[Signature]]
+            signatures: FlowStandard[KeyValue[Subject, Signature]]
     ):
         assert isinstance(data, HashPoint)
-        assert isinstance(signatures, NullableReference)
+        assert isinstance(signatures, FlowStandard)
         self.data = data
         self.signatures = signatures
         self.hash_point = HashPoint.of(self)
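
SPVMapper reduces each (Subject, Signature) pair to an ed25519 check against the transaction-data hash point. Stripped of the rainbowadn wrappers, the underlying verification is roughly this PyNaCl call (the message literal is illustrative):

from nacl.signing import SigningKey

key = SigningKey.generate()
message = b'transaction-data-hash-point'   # stands in for FlowTransactionData.hash_point
signature = key.sign(message).signature
key.verify_key.verify(message, signature)  # raises nacl.exceptions.BadSignatureError if forged
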
@ -245,45 +293,25 @@ class FlowTransaction(RecursiveMentionable, StaticMentionable):
     def from_bytes(cls, source: bytes, resolver: HashResolver) -> 'FlowTransaction':
         assert isinstance(source, bytes)
         assert isinstance(resolver, HashResolver)
-        signature_factory: RainbowFactory[Signature] = Signature.factory()
-        assert isinstance(signature_factory, RainbowFactory)
-        stack_factory: RainbowFactory[Stack[Signature]] = StackFactory(signature_factory).loose()
-        assert isinstance(stack_factory, RainbowFactory)
         return cls(
             ResolverOrigin(FlowTransactionData.factory(), source[:HashPoint.HASH_LENGTH], resolver).hash_point(),
-            NullableReferenceFactory(stack_factory).from_bytes(source[HashPoint.HASH_LENGTH:], resolver),
+            FlowStandardFactory.of(KeyValueFactory(Subject.factory(), Signature.factory()).loose()).from_bytes(
+                source[HashPoint.HASH_LENGTH:], resolver
+            ),
         )

-    async def iter_coins(
-            self
-    ) -> AsyncIterable[FlowCoin]:
-        transaction_data: FlowTransactionData = await self.data_resolved()
-        assert isinstance(transaction_data, FlowTransactionData)
-        index = 0
-        out_coin: HashPoint[FlowCoinData]
-        async for out_coin in transaction_data.iter_out_coins():
-            assert isinstance(out_coin, HashPoint)
-            coin: FlowCoin = FlowCoin(out_coin, self.hash_point, HashPoint.of(Integer(index)))
-            assert isinstance(coin, FlowCoin)
-            yield coin
-            index += 1
-
-    async def coins(
-            self
-    ) -> list[FlowCoin]:
-        return [coin async for coin in self.iter_coins()]
+    def _coin(self, data: FlowCoinData) -> FlowCoin:
+        return FlowCoin(HashPoint.of(data), self.hash_point)

     async def used_reducer(self) -> Reducer[FlowCoin, Any]:
         transaction_data: FlowTransactionData = await self.data_resolved()
         assert isinstance(transaction_data, FlowTransactionData)
-        bridge: Reducer[FlowCoin, Any] = ListBridge(await transaction_data.coins())
-        assert isinstance(bridge, Reducer)
-        return bridge
+        return await transaction_data.in_coins.reducer()

     async def minted_reducer(self) -> Reducer[FlowCoin, Any]:
-        bridge: Reducer[FlowCoin, Any] = ListBridge(await self.coins())
-        assert isinstance(bridge, Reducer)
-        return bridge
+        transaction_data: FlowTransactionData = await self.data_resolved()
+        assert isinstance(transaction_data, FlowTransactionData)
+        return MapReducer(CallableMapper(self._coin), await transaction_data.out_coins.reducer())

     async def verify(self):
         data: FlowTransactionData = await self.data_resolved()
@ -293,41 +321,49 @@ class FlowTransaction(RecursiveMentionable, StaticMentionable):

     async def str(self, tab: int) -> str:
         assert isinstance(tab, int)
-        data_str, signatures_str = await gather(
-            hash_point_format(self.data, tab + 1),
-            self.signatures.str(tab + 1),
-        )
-        assert isinstance(data_str, str)
-        assert isinstance(signatures_str, str)
         return f'(' \
                f'{tabulate(tab + 1)}transaction' \
-               f'{tabulate(tab + 1)}{data_str}' \
-               f'{tabulate(tab + 1)}{signatures_str}' \
+               f'{tabulate(tab + 1)}{await hash_point_format(self.data, tab + 1)}' \
+               f'{tabulate(tab + 1)}(signatures)' \
                f'{tabulate(tab)})'

     @classmethod
-    def make(
-            cls,
-            in_coins: list[FlowCoin],
-            out_coins: list[FlowCoinData],
-            keys: list[nacl.signing.SigningKey],
-    ) -> 'FlowTransaction':
-        assert isinstance(in_coins, list)
-        assert isinstance(out_coins, list)
-        assert isinstance(keys, list)
-        transaction_data = FlowTransactionData(
-            Stack.off(FlowCoin.factory(), reversed(in_coins)),
-            Stack.off(FlowCoinData.factory(), reversed(out_coins)),
-        )
-        assert isinstance(transaction_data, FlowTransactionData)
-        return FlowTransaction(
-            HashPoint.of(transaction_data),
-            Stack.off(
-                Signature.factory(),
-                (Signature.sign(key, HashPoint.of(transaction_data)) for key in reversed(keys))
-            )
+    def empty(cls):
+        return cls(
+            HashPoint.of(FlowTransactionData.empty()),
+            FlowStandardFactory.empty(KeyValueFactory(Subject.factory(), Signature.factory()).loose()),
         )

     @classmethod
-    def empty(cls):
-        return cls.make([], [], [])
+    async def make(
+            cls,
+            used: Iterable[FlowCoin],
+            minted: Iterable[FlowCoinData],
+            keys: Iterable[SigningKey],
+    ) -> 'FlowTransaction':
+        used_std: FlowStandard[FlowCoin]
+        minted_std: FlowStandard[FlowCoinData]
+        used_std, minted_std = await gather(
+            FlowStandardFactory.off(FlowCoin.factory(), used),
+            FlowStandardFactory.off(FlowCoinData.factory(), minted),
+        )
+        assert isinstance(used_std, FlowStandard)
+        assert isinstance(minted_std, FlowStandard)
+        transaction_data: FlowTransactionData = FlowTransactionData(
+            used_std,
+            minted_std,
+        )
+        signatures: list[KeyValue[Subject, Signature]] = [
+            KeyValue(
+                HashPoint.of(Subject(signing_key.verify_key)),
+                HashPoint.of(Signature.sign(signing_key, transaction_data.hash_point)),
+            )
+            for
+            signing_key
+            in
+            keys
+        ]
+        return cls(
+            HashPoint.of(transaction_data),
+            await FlowStandardFactory.off(KeyValueFactory(Subject.factory(), Signature.factory()).loose(), signatures)
+        )
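
The new make/verify pair is exercised by the test added further down; in short, inside an async context (mirroring that test):

key = SigningKey.generate()
subject = Subject(key.verify_key)
transaction = await FlowTransaction.make(
    [],                                # no coins spent
    [FlowCoinData.of(subject, 1000)],  # one freshly minted coin
    [],                                # nothing spent, so no signing keys needed
)
assert_true(await transaction.verify())
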
11
rainbowadn/flow13/_flowunion.py
Normal file
@ -0,0 +1,11 @@
+from typing import TypeVar
+
+from ._flowstandard import *
+
+__all__ = ('flow_union',)
+
+KeyT = TypeVar('KeyT')
+
+
+async def flow_union(left: FlowStandard[KeyT], right: FlowStandard[KeyT]) -> FlowStandard[KeyT]:
+    return FlowStandard((await left.protocolized.tree.union(right.protocolized.tree)).protocolized())
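
A minimal usage sketch of flow_union, assuming two FlowStandard sets built the same way as in the tests below (the names fs0, fs1, merged are illustrative):

fs0 = FlowStandard(abt0.protocolized())
fs1 = FlowStandard(abt1.protocolized())
merged = await flow_union(fs0, fs1)  # a FlowStandard over the union of both trees
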
@ -1,6 +1,10 @@
 import os
+import random
 import time
 import unittest
+from typing import Any
+
+from nacl.signing import SigningKey

 from rainbowadn.atomic import *
 from rainbowadn.collection.comparison import *
@ -14,6 +18,7 @@ from rainbowadn.flow.sequence import *
 from rainbowadn.flow.stacked import *
 from rainbowadn.flow13 import *
 from rainbowadn.nullability import *
+from rainbowadn.v13 import Subject


 class PrintDispatch(SequenceDispatch[HashPoint, None]):
@ -71,7 +76,7 @@ class TestBridge(unittest.IsolatedAsyncioTestCase):
         return CallableMapper(cls.element_of)

     @classmethod
-    async def bridge(cls) -> Reducer[SequenceDispatcher[HashPoint[Plain], None], None]:
+    async def bridge(cls) -> Reducer[SequenceDispatcher[HashPoint[Plain], Any], Any]:
         hp: HashPoint[Stack[Plain]] = Stack.off(Plain.factory(), [Plain(b'A'), Plain(b'B'), Plain(b'C')]).hashpoint()
         print(await hash_point_format(hp, 0))
         bridge = StackBridge(hp).over_elements()
@ -91,6 +96,11 @@ class TestBridge(unittest.IsolatedAsyncioTestCase):
             await StackedReducer(bridge).loose().reduce(PrintReduce().loose())
         )

+    async def test_iterator(self):
+        set_gather_linear()
+        bridge = ListBridge(list(range(13)))
+        print(list(await bridge.reduce(FlowIterate([]).loose())))
+
     @classmethod
     async def abt_of(cls, *plains: bytes) -> ActiveBinaryTree[Plain, Integer]:
         abt: ActiveBinaryTree[Plain, Integer] = ActiveBinaryTree.empty(AVL(PlainComparator(Fail())), Plain.factory())
@ -102,7 +112,9 @@ class TestBridge(unittest.IsolatedAsyncioTestCase):
         set_gather_linear()
         set0 = {os.urandom(8).hex().encode() for _ in range(64)}
         abt0: ActiveBinaryTree[Plain, Integer] = await self.abt_of(*set0)
-        abt1: ActiveBinaryTree[Plain, Integer] = await abt0.add(HashPoint.of(Plain(os.urandom(8).hex().encode())))
+        abt1: ActiveBinaryTree[Plain, Integer] = abt0
+        for _ in range(16):
+            abt1 = await abt1.add(HashPoint.of(Plain(os.urandom(8).hex().encode())))
         fs0 = FlowStandard(abt0.protocolized())
         fs1 = FlowStandard(abt1.protocolized())
         _t = time.process_time()
@ -111,3 +123,41 @@ class TestBridge(unittest.IsolatedAsyncioTestCase):
         with self.assertRaises(ValueError):
             await fs1.verify_subset(UnitReducer(fs0))
         print('verification time', time.process_time() - _t)
+
+    async def test_flow13(self):
+        set_gather_linear()
+        key = SigningKey.generate()
+        subject = Subject(key.verify_key)
+        transaction = await FlowTransaction.make(
+            [],
+            [FlowCoinData.of(subject, 1000)],
+            [],
+        )
+        print(assert_true(await transaction.verify()))
+        cheque = await FlowCheque.make([transaction])
+        print(assert_true(await cheque.verify()))
+        block = await BankBlock.empty().add(cheque)
+        print(assert_true(await block.verify()))
+        enable_newline()
+        print(await block.reference.str(0))
+        disable_newline()
+
+    async def test_13_stress(self):
+        set_gather_linear()
+        block: BankBlock = BankBlock.empty()
+        for _ in range(8):
+            block = await block.add(
+                await FlowCheque.make(
+                    [
+                        await FlowTransaction.make(
+                            [],
+                            [
+                                FlowCoinData.of(Subject(SigningKey.generate().verify_key), 0)
+                                for _ in range(random.randint(4, 7))
+                            ],
+                            []
+                        )
+                        for _ in range(random.randint(4, 7))
+                    ]
+                )
+            )
@ -95,3 +95,28 @@ class TestTrees(unittest.IsolatedAsyncioTestCase):
         for i in range(250):
             tree = await tree.add(HashPoint.of(Plain(os.urandom(16))))
         print(await AVL.height(tree.protocolized()))
+
+    async def test_split(self):
+        set_gather_linear()
+        protocol = AVL(PlainComparator(Replace()))
+        tree: ActiveBinaryTree[Plain, Integer] = ActiveBinaryTree.empty(
+            protocol, Plain.factory()
+        )
+        for i in range(26):
+            tree = await tree.add(HashPoint.of(Plain(bytes([ord('A') + i]))))
+        treel, treer = await tree.split(HashPoint.of(Plain(b'J')))
+        print(await treel.reference.str(0), ' << split >> ', await treer.reference.str(0))
+
+    async def test_union(self):
+        set_gather_linear()
+        protocol = AVL(PlainComparator(Replace()))
+        tree0: ActiveBinaryTree[Plain, Integer] = ActiveBinaryTree.empty(protocol, Plain.factory())
+        for i in range(10):
+            tree0 = await tree0.add(HashPoint.of(Plain(os.urandom(2).hex().encode())))
+        print(await tree0.reference.str(0))
+        tree1: ActiveBinaryTree[Plain, Integer] = ActiveBinaryTree.empty(protocol, Plain.factory())
+        for i in range(10):
+            tree1 = await tree1.add(HashPoint.of(Plain(os.urandom(2).hex().encode())))
+        print(await tree1.reference.str(0))
+        tree = await tree0.union(tree1)
+        print(await tree.reference.str(0))
@ -19,6 +19,10 @@ class Signature(Atomic):
         assert_eq(len(source), nacl.bindings.crypto_sign_BYTES)
         self.source = source

+    @classmethod
+    def empty(cls) -> 'Signature':
+        return Signature(bytes(nacl.bindings.crypto_sign_BYTES))
+
     @classmethod
     def sign(cls, key: nacl.signing.SigningKey, hash_point: HashPoint) -> 'Signature':
         assert isinstance(key, nacl.signing.SigningKey)
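
Signature.empty() is an all-zero placeholder of crypto_sign_BYTES (64 bytes for ed25519); CVMapper above uses it only to build a KeyValue lookup key, so it is never expected to verify. For illustration:

import nacl.bindings

placeholder = bytes(nacl.bindings.crypto_sign_BYTES)
assert len(placeholder) == 64 and not any(placeholder)  # 64 zero bytes
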
@ -1,9 +1,5 @@
 import asyncio
-import json
-import os
 import random
-import shutil
-import time
 from contextlib import ExitStack

 from nacl.signing import SigningKey
@ -17,37 +13,7 @@ from rainbowadn.instrument import *
 from rainbowadn.nullability import *
 from rainbowadn.testing.resolvers import *
 from rainbowadn.v13 import *
+from trace_common import *
-
-
-def get_dr() -> ExtendableResolver:
-    dr = DictResolver()
-    dr = DelayedResolver(dr, lambda: random.uniform(0.200, 0.500))
-    # dr = CachingResolver(dr)
-    return dr
-
-
-def target_str(target) -> str:
-    match target:
-        case type(__name__=name):
-            return name
-        case object(__class__=type(__name__=name)):
-            return name
-
-
-def jsonify(dumped: Instrumentation) -> dict:
-    prefix = f'{target_str(dumped.target)}:{dumped.methodname}'
-    match dumped:
-        case Counter(counter=ctr):
-            return {f'{prefix}:counter': ctr}
-        case Concurrency(log=log):
-            return {f'{prefix}:concurrency': log}
-        case EntryExit(entry_log=entry_log, exit_log=exit_log):
-            return {
-                f'{prefix}:entry': entry_log,
-                f'{prefix}:exit': exit_log,
-            }
-        case _:
-            return {}


 async def mock(bank: BankChain) -> BankChain:
@ -137,15 +103,6 @@ async def _instrument(bank: BankChain) -> list[Instrumentation]:
     return instrumentations


-class DeintrumentationSize(Instrumentation):
-    def instrument(self, method, *args, **kwargs):
-        print(
-            f'deinstrumentation size @ {target_str(self.target)}:{self.methodname}',
-            len(self.deinstrumentation)
-        )
-        return method(*args, **kwargs)
-
-
 async def _trace():
     set_gather_linear()
     bank = await _generate(
@ -163,37 +120,12 @@ async def _trace():
     return instrumentations


-def _fn() -> str:
-    return f'trace/{int(time.time())}-{os.urandom(2).hex()}.json'
-
-
-def _jsonify(instrumentations: list[Instrumentation]) -> dict:
-    jsonified = {}
-    for dumped in instrumentations:
-        jsonified |= jsonify(dumped)
-    return jsonified
-
-
-def _dump(fn: str, jsonified: dict) -> None:
-    with open(fn, 'w') as file:
-        json.dump(
-            jsonified,
-            file
-        )
-    print('dumped')
-
-
-def _copy(fn: str) -> None:
-    shutil.copy(fn, f'trace/latest.json')
-    print('copied')
-
-
 async def main():
     instrumentations = await _trace()
-    fn = _fn()
-    jsonified = _jsonify(instrumentations)
-    _dump(fn, jsonified)
-    _copy(fn)
+    fn = get_fn()
+    jsonified = jsonify_list(instrumentations)
+    dump(fn, jsonified)
+    copy(fn)
     plot(fn)
     print('plotted')
76
trace_common.py
Normal file
@ -0,0 +1,76 @@
+import json
+import os
+import random
+import shutil
+import time
+
+from rainbowadn.core import *
+from rainbowadn.instrument import *
+from rainbowadn.testing.resolvers import *
+
+__all__ = ('get_dr', 'target_str', 'jsonify', 'get_fn', 'jsonify_list', 'dump', 'copy', 'DeintrumentationSize',)
+
+
+def get_dr() -> ExtendableResolver:
+    dr = DictResolver()
+    dr = DelayedResolver(dr, lambda: random.uniform(0.200, 0.500))
+    # dr = CachingResolver(dr)
+    return dr
+
+
+def target_str(target) -> str:
+    match target:
+        case type(__name__=name):
+            return name
+        case object(__class__=type(__name__=name)):
+            return name
+
+
+def jsonify(dumped: Instrumentation) -> dict:
+    prefix = f'{target_str(dumped.target)}:{dumped.methodname}'
+    match dumped:
+        case Counter(counter=ctr):
+            return {f'{prefix}:counter': ctr}
+        case Concurrency(log=log):
+            return {f'{prefix}:concurrency': log}
+        case EntryExit(entry_log=entry_log, exit_log=exit_log):
+            return {
+                f'{prefix}:entry': entry_log,
+                f'{prefix}:exit': exit_log,
+            }
+        case _:
+            return {}
+
+
+def get_fn() -> str:
+    return f'trace/{int(time.time())}-{os.urandom(2).hex()}.json'
+
+
+def jsonify_list(instrumentations: list[Instrumentation]) -> dict:
+    jsonified = {}
+    for dumped in instrumentations:
+        jsonified |= jsonify(dumped)
+    return jsonified
+
+
+def dump(fn: str, jsonified: dict) -> None:
+    with open(fn, 'w') as file:
+        json.dump(
+            jsonified,
+            file
+        )
+    print('dumped')
+
+
+def copy(fn: str) -> None:
+    shutil.copy(fn, f'trace/latest.json')
+    print('copied')
+
+
+class DeintrumentationSize(Instrumentation):
+    def instrument(self, method, *args, **kwargs):
+        print(
+            f'deinstrumentation size @ {target_str(self.target)}:{self.methodname}',
+            len(self.deinstrumentation)
+        )
+        return method(*args, **kwargs)
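
The keys emitted by jsonify/jsonify_list are '<target>:<method>:<kind>' strings, which is what plot.py later looks up. With the Concurrency instrumentations from trace_flow.py below, the dumped JSON has roughly this shape (values elided; an EntryExit instrumentation would contribute ':entry'/':exit' keys instead):

{
    'DelayedResolver:sleep:concurrency': ...,
    'ActiveBinaryTree:add:concurrency': ...,
    'ActiveBinaryTree:contains:concurrency': ...,
    'BinaryAction:on:concurrency': ...,
}
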
107
trace_flow.py
Normal file
@ -0,0 +1,107 @@
+import asyncio
+import random
+from contextlib import ExitStack
+
+from nacl.signing import SigningKey
+
+from plot import *
+from rainbowadn.collection.trees.binary import *
+from rainbowadn.collection.trees.binary.actions import *
+from rainbowadn.core import *
+from rainbowadn.flow13 import *
+from rainbowadn.instrument import *
+from rainbowadn.testing.resolvers import *
+from rainbowadn.v13 import Subject
+from trace_common import *
+
+
+def get_instrumentations() -> list[Instrumentation]:
+    sleep_cc = Concurrency(DelayedResolver, 'sleep')
+    return [
+        sleep_cc,
+        Concurrency(ActiveBinaryTree, 'add'),
+        Concurrency(ActiveBinaryTree, 'contains'),
+        Concurrency(BinaryAction, 'on'),
+    ]
+
+
+async def _generate(
+        blocks: int,
+        subjects_min: int,
+        subjects_max: int,
+        transactions_min: int,
+        transactions_max: int,
+) -> BankBlock:
+    bank: BankBlock = BankBlock.empty()
+    for _ in range(blocks):
+        bank = await bank.add(
+            await FlowCheque.make(
+                [
+                    await FlowTransaction.make(
+                        [],
+                        [
+                            FlowCoinData.of(Subject(SigningKey.generate().verify_key), 0)
+                            for _ in range(random.randint(subjects_min, subjects_max))
+                        ],
+                        []
+                    )
+                    for _ in range(random.randint(transactions_min, transactions_max))
+                ]
+            )
+        )
+    print('generated')
+    return bank
+
+
+async def _migrate(bank: BankBlock) -> BankBlock:
+    assert_true(await bank.verify())
+    bank = BankBlock(await get_dr().migrate_resolved(bank.reference))
+    print('migrated')
+    return bank
+
+
+async def _instrument(bank: BankBlock) -> list[Instrumentation]:
+    with ExitStack() as estack:
+        instrumentations: list[Instrumentation] = get_instrumentations()
+        for stacked in instrumentations:
+            stacked.enter(estack)
+        assert_true(await bank.verify())
+    print('deinstrumentation (should be empty):', Instrumentation.deinstrumentation)
+    print('instrumented')
+    return instrumentations
+
+
+async def _trace():
+    set_gather_linear()
+    bank = await _generate(
+        16,
+        8, 15,
+        8, 15,
+    )
+    # bank = await _generate(
+    #     8,
+    #     8, 8,
+    #     8, 8,
+    # )
+    bank = await _migrate(bank)
+    set_gather_asyncio()
+    with DeintrumentationSize(Instrumentation, 'deinstrument'):
+        with Counter(DeintrumentationSize, 'instrument') as de_ctr:
+            instrumentations = await _instrument(bank)
+    print(jsonify(de_ctr))
+    print('traced')
+    return instrumentations
+
+
+async def main():
+    instrumentations = await _trace()
+    fn = get_fn()
+    jsonified = jsonify_list(instrumentations)
+    dump(fn, jsonified)
+    copy(fn)
+    plot(fn)
+    print('plotted')
+
+
+if __name__ == '__main__':
+    asyncio.run(main())
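
For orientation, a successful run of trace_flow.py prints its progress markers in this order (the dict in the middle comes from jsonify(de_ctr); values elided):

generated
migrated
deinstrumentation (should be empty): ...
instrumented
{'DeintrumentationSize:instrument:counter': ...}
traced
dumped
copied
plotted
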