various improvements
parent e745eb7c54
commit b789b3c9f3
@@ -118,7 +118,7 @@ class StackFactory(RainbowFactory[Stack[ElementType]], Generic[ElementType]):
         assert isinstance(source, bytes)
         assert isinstance(resolver, HashResolver)
         return Stack(
-            NullableReferenceFactory(self).from_bytes(source[:HashPoint.HASH_LENGTH], resolver),
+            NullableReference.f(self).from_bytes(source[:HashPoint.HASH_LENGTH], resolver),
             ResolverOrigin(self.factory, source[HashPoint.HASH_LENGTH:], resolver).hash_point()
         )
@@ -1,66 +0,0 @@
-from typing import Generic, Iterable, TypeVar
-
-from rainbowadn.core import *
-
-__all__ = ('Pair', 'PairFactory',)
-
-E0Type = TypeVar('E0Type')
-E1Type = TypeVar('E1Type')
-
-
-class Pair(
-    RecursiveMentionable,
-    Generic[E0Type, E1Type]
-):
-    def __init__(self, element0: HashPoint[E0Type], element1: HashPoint[E1Type]):
-        assert isinstance(element0, HashPoint)
-        assert isinstance(element1, HashPoint)
-        self.element0 = element0
-        self.element1 = element1
-
-    def points(self) -> Iterable[HashPoint]:
-        return [self.element0, self.element1]
-
-    def __bytes__(self):
-        return bytes(self.element0) + bytes(self.element1)
-
-    def __factory__(self) -> RainbowFactory['Pair[E0Type, E1Type]']:
-        return PairFactory(self.element0.factory, self.element1.factory)
-
-    async def str(self, tab: int) -> str:
-        assert isinstance(tab, int)
-        e0_str, e1_str = await gather(
-            hash_point_format(self.element0, tab),
-            hash_point_format(self.element1, tab),
-        )
-        assert isinstance(e0_str, str)
-        assert isinstance(e1_str, str)
-        return f'(pair)' \
-               f'{tabulate(tab)}{e0_str}' \
-               f'{tabulate(tab)}{e1_str}'
-
-
-class PairFactory(
-    RainbowFactory[Pair[E0Type, E1Type]],
-    Generic[E0Type, E1Type]
-):
-    def __init__(
-            self,
-            e0_factory: RainbowFactory[E0Type],
-            e1_factory: RainbowFactory[E1Type],
-    ):
-        assert isinstance(e0_factory, RainbowFactory)
-        assert isinstance(e1_factory, RainbowFactory)
-        self.e0_factory = e0_factory
-        self.e1_factory = e1_factory
-
-    def from_bytes(self, source: bytes, resolver: HashResolver) -> Pair[E0Type, E1Type]:
-        assert isinstance(source, bytes)
-        assert isinstance(resolver, HashResolver)
-        return Pair(
-            ResolverOrigin(self.e0_factory, source[:HashPoint.HASH_LENGTH], resolver).hash_point(),
-            ResolverOrigin(self.e1_factory, source[HashPoint.HASH_LENGTH:], resolver).hash_point(),
-        )
-
-    def loose(self) -> RainbowFactory[Pair[E0Type, E1Type]]:
-        return self
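Note: the deleted Pair/PairFactory are superseded by the inlineable IPair from rainbowadn.inlining, which the hunks below switch to. A minimal sketch of the replacement pattern, as implied by those call sites (variable names illustrative):

    # illustrative values; IPair.of wraps both values as hash points
    link = IPair.of(cheque, bank)                  # was: Pair(HashPoint.of(cheque), HashPoint.of(bank))
    bank = await link.e1.resolve()                 # was: await pair.element1.resolve()
    link_factory = IPair.f(FlowCheque.factory(), FlowBank.factory())
    # was: PairFactory(FlowCheque.factory(), FlowBank.factory()).loose()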
@@ -55,13 +55,14 @@ class BinaryTreeFactory(RainbowFactory[BinaryTree[TreeKeyType]], Generic[TreeKeyType]):
     def __init__(self, factory: RainbowFactory[TreeKeyType]):
         assert isinstance(factory, RainbowFactory)
         self.factory = factory
+        self.reference_factory = NullableReference.f(self)

     def from_bytes(self, source: bytes, resolver: HashResolver) -> BinaryTree[TreeKeyType]:
         assert isinstance(source, bytes)
         assert isinstance(resolver, HashResolver)
         return BinaryTree(
-            NullableReferenceFactory(self).from_bytes(source[:HashPoint.HASH_LENGTH], resolver),
-            NullableReferenceFactory(self).from_bytes(
+            self.reference_factory.from_bytes(source[:HashPoint.HASH_LENGTH], resolver),
+            self.reference_factory.from_bytes(
                 source[HashPoint.HASH_LENGTH:HashPoint.HASH_LENGTH * 2],
                 resolver
             ),
@@ -1,9 +1,9 @@
 from typing import TypeAlias

-from rainbowadn.collection.pair import *
 from rainbowadn.core import *
 from rainbowadn.flow.core import *
 from rainbowadn.flow.verification.core import *
+from rainbowadn.inlining import *
 from rainbowadn.nullability import *
 from ._bankflow import *
 from ._flowbank import *
@@ -13,11 +13,13 @@ from ._flowstandard import *

 __all__ = ('BankBlock',)

-Index: TypeAlias = FlowStandard[FlowBlock[Pair[FlowCheque, FlowBank]]]
+Link: TypeAlias = IPair[FlowCheque, FlowBank]
+Block: TypeAlias = FlowBlock[Link]
+Index: TypeAlias = FlowStandard[FlowBlock[Link]]


 class BankBlock:
-    def __init__(self, reference: NullableReference[FlowBlock[Pair[FlowCheque, FlowBank]]]):
+    def __init__(self, reference: NullableReference[Block]):
         assert isinstance(reference, NullableReference)
         self.reference = reference

@@ -26,8 +28,8 @@ class BankBlock:
         return BankFlow(FlowBank.empty())

     @classmethod
-    def link_factory(cls) -> RainbowFactory[Pair[FlowCheque, FlowBank]]:
-        return PairFactory(FlowCheque.factory(), FlowBank.factory()).loose()
+    def link_factory(cls) -> RainbowFactory[Link]:
+        return IPair.f(FlowCheque.factory(), FlowBank.factory())

     @classmethod
     def empty(cls) -> 'BankBlock':
@@ -59,20 +61,20 @@ class AddCheque(
         self.cheque = cheque

     @classmethod
-    async def _bank_for_link(cls, link: Nullable[Pair[FlowCheque, FlowBank]]) -> Nullable[FlowBank]:
+    async def _bank_for_link(cls, link: Nullable[Link]) -> Nullable[FlowBank]:
         assert isinstance(link, Nullable)
-        return Null() if link.null() else NotNull(await (link.resolve()).element1.resolve())
+        return Null() if link.null() else NotNull(await (link.resolve()).e1.resolve())

     async def _next_bank_for_bank(self, bank: Nullable[FlowBank]) -> FlowBank:
         return await BankBlock.flow().add(bank, self.cheque)

-    async def _link_for_bank(self, bank: FlowBank) -> HashPoint[Pair[FlowCheque, FlowBank]]:
+    async def _link_for_bank(self, bank: FlowBank) -> HashPoint[Link]:
         assert isinstance(bank, FlowBank)
-        return HashPoint.of(Pair(HashPoint.of(self.cheque), HashPoint.of(bank)))
+        return HashPoint.of(IPair.of(self.cheque, bank))

     async def _next_link_for_link(
-            self, link: Nullable[Pair[FlowCheque, FlowBank]]
-    ) -> HashPoint[Pair[FlowCheque, FlowBank]]:
+            self, link: Nullable[Link]
+    ) -> HashPoint[Link]:
         assert isinstance(link, Nullable)
         return await self._link_for_bank(
             await self._next_bank_for_bank(
@@ -83,8 +85,8 @@ class AddCheque(
     @classmethod
     async def _add_link_to_reference(
             cls,
-            link: HashPoint[Pair[FlowCheque, FlowBank]],
-            reference: NullableReference[FlowBlock[Pair[FlowCheque, FlowBank]]],
+            link: HashPoint[Link],
+            reference: NullableReference[Block],
     ) -> BankBlock:
         assert isinstance(link, HashPoint)
         assert isinstance(reference, NullableReference)
@@ -92,8 +94,8 @@ class AddCheque(

     async def _add_to_link(
             self,
-            previous: Nullable[Pair[FlowCheque, FlowBank]],
-            reference: NullableReference[FlowBlock[Pair[FlowCheque, FlowBank]]],
+            previous: Nullable[Link],
+            reference: NullableReference[Block],
     ) -> BankBlock:
         assert isinstance(previous, Nullable)
         assert isinstance(reference, NullableReference)
@@ -101,7 +103,7 @@ class AddCheque(

     async def _add_to_reference(
             self,
-            reference: NullableReference[FlowBlock[Pair[FlowCheque, FlowBank]]],
+            reference: NullableReference[Block],
     ) -> BankBlock:
         assert isinstance(reference, NullableReference)
         return await self._add_to_link(await FlowBlock.link_of(reference), reference)
@@ -1,10 +1,12 @@
-from rainbowadn.collection.pair import *
+from typing import TypeAlias
+
 from rainbowadn.core import *
 from rainbowadn.flow.bridge import *
 from rainbowadn.flow.core import *
 from rainbowadn.flow.primitive import *
 from rainbowadn.flow.verification.core import *
 from rainbowadn.flow.verification.stateverification import *
+from rainbowadn.inlining import *
 from rainbowadn.nullability import *
 from ._flowbank import *
 from ._flowcheque import *
@@ -14,6 +16,8 @@ from ._flowunion import *

 __all__ = ('BankFlow',)

+Link: TypeAlias = IPair[FlowCheque, FlowBank]
+

 class BankFlow(
     Verification[
@@ -136,18 +140,18 @@ class BankFlow(
         return FlowBank(minted, used)

     def link_verification(self) -> Verification[
-        tuple[Nullable[HashPoint[Pair[FlowCheque, FlowBank]]], HashPoint[Pair[FlowCheque, FlowBank]]]
+        tuple[Nullable[HashPoint[Link]], HashPoint[Link]]
     ]:
-        class Decomposition(Mapper[HashPoint[Pair[FlowCheque, FlowBank]], tuple[FlowCheque, FlowBank]]):
-            async def map(self, element: HashPoint[Pair[FlowCheque, FlowBank]]) -> tuple[FlowCheque, FlowBank]:
+        class Decomposition(Mapper[HashPoint[Link], tuple[FlowCheque, FlowBank]]):
+            async def map(self, element: HashPoint[Link]) -> tuple[FlowCheque, FlowBank]:
                 assert isinstance(element, HashPoint)
-                pair: Pair[FlowCheque, FlowBank] = await element.resolve()
-                assert isinstance(pair, Pair)
+                link: Link = await element.resolve()
+                assert isinstance(link, IPair)
                 cheque: FlowCheque
                 bank: FlowBank
                 cheque, bank = await gather(
-                    pair.element0.resolve(),
-                    pair.element1.resolve(),
+                    link.e0.resolve(),
+                    link.e1.resolve(),
                 )
                 assert isinstance(cheque, FlowCheque)
                 assert isinstance(bank, FlowBank)
@@ -102,7 +102,7 @@ class FlowBlockFactory(RainbowFactory[FBL], Generic[LinkT]):
         assert isinstance(source, bytes)
         assert isinstance(resolver, HashResolver)
         return FlowBlock(
-            NullableReferenceFactory(self).from_bytes(source[:HashPoint.HASH_LENGTH], resolver),
+            NullableReference.f(self).from_bytes(source[:HashPoint.HASH_LENGTH], resolver),
             FlowStandardFactory.of(self, HashComparator(Fail())).from_bytes(
                 source[HashPoint.HASH_LENGTH:2 * HashPoint.HASH_LENGTH], resolver
             ),
@@ -91,13 +91,13 @@ class FlowStandardFactory(Inlining[FlowStandard[KeyT]], Generic[KeyT]):
     ):
         assert isinstance(factory, RainbowFactory)
         assert isinstance(comparator, Comparator)
-        self.factory: Inlining[
+        self.factory: RainbowFactory[
             NullableReference[BinaryTree[KeyMetadata[KeyT, Integer]]]
-        ] = NullableReferenceFactory(factory).loose()
+        ] = NullableReference.f(factory)
         self.comparator = comparator

     def size(self) -> Optional[int]:
-        return self.factory.size()
+        return Inlining.factory_size(self.factory)

     @classmethod
     def of(cls, factory: RainbowFactory[KeyT], comparator: Comparator[KeyT]) -> 'FlowStandardFactory[KeyT]':
@@ -1,17 +1,19 @@
 __all__ = (
     'IAtomic',
-    'IAuto', 'IAutoFactory',
+    'IAuto', 'Auto', 'IAutoFactory',
     'IByte',
     'Inlining',
     'IPair',
+    'IRef',
     'IStatic', 'IStaticFactory',
     'IUnit',
 )

 from .iatomic import IAtomic
-from .iauto import IAuto, IAutoFactory
+from .iauto import Auto, IAuto, IAutoFactory
 from .ibyte import IByte
 from .inlining import Inlining
-from .ipair import *
+from .ipair import IPair
+from .iref import IRef
 from .istatic import IStatic, IStaticFactory
 from .iunit import IUnit
@@ -1,17 +1,16 @@
 import heapq
-from io import BytesIO
-from typing import Callable, Generic, Iterable, Optional, Type, TypeAlias, TypeVar
+from typing import Callable, Generic, Iterable, Optional, Type, TypeAlias, TypeVar, overload

 from rainbowadn.core import *
 from .inlining import *

-__all__ = ('IAuto', 'IAutoFactory',)
+__all__ = ('IAuto', 'Auto', 'IAutoFactory',)

 _IList: TypeAlias = list[tuple[int, Mentionable]]
 _UList: TypeAlias = list[tuple[int, HashPoint]]
 _SList: TypeAlias = list[tuple[int, RainbowFactory, int]]
 _VList: TypeAlias = list[tuple[int, RainbowFactory]]
-_MCall: TypeAlias = Callable[[bytes, HashResolver, _IList, _UList], None]
+_MCall: TypeAlias = Callable[['Auto', _IList, _UList], None]
 _MTuple: TypeAlias = tuple[int, _MCall, int]
 _IAuto = TypeVar('_IAuto')

@@ -157,6 +156,117 @@ class IAuto(RecursiveMentionable):
         return f'{tabulate(tab)}'.join(formatted)


+T = TypeVar('T')
+T0 = TypeVar('T0')
+T1 = TypeVar('T1')
+T2 = TypeVar('T2')
+T3 = TypeVar('T3')
+T4 = TypeVar('T4')
+
+
+class Auto:
+    def __init__(self, source: bytes, resolver: HashResolver):
+        assert isinstance(source, bytes)
+        assert isinstance(resolver, HashResolver)
+        self.source = source
+        self.resolver = resolver
+
+    def as_hashpoint(self, factory: RainbowFactory[T]) -> HashPoint[T]:
+        assert isinstance(factory, RainbowFactory)
+        assert_eq(len(self.source), HashPoint.HASH_LENGTH)
+        return ResolverMetaOrigin(self.resolver).hash_point(factory, self.source)
+
+    def as_value(self, factory: RainbowFactory[T]) -> T:
+        return factory.from_bytes(self.source, self.resolver)
+
+    def as_mentionable(self, factory: RainbowFactory) -> Mentionable:
+        return self.as_value(factory)
+
+    def sub(self, start: Optional[int], stop: Optional[int]) -> 'Auto':
+        return Auto(self.source[start:stop], self.resolver)
+
+    def __enter__(self):
+        self.__index = 0
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        assert_eq(len(self.source), self.__index)
+
+    def next(self, size: int) -> 'Auto':
+        assert isinstance(size, int)
+        index = self.__index
+        self.__index += size
+        return self.sub(index, self.__index)
+
+    @classmethod
+    def _static_size(cls, *factories: RainbowFactory) -> int:
+        return sum(Inlining.factory_size(factory) for factory in factories)
+
+    def _static_simple(self, *factories: RainbowFactory) -> Iterable[Mentionable]:
+        with self:
+            for factory in factories:
+                size = Inlining.factory_size(factory)
+                assert isinstance(size, int)
+                yield self.next(size).as_mentionable(factory)
+
+    def _static_separate(self, infix_position: int, *factories: RainbowFactory) -> Iterable[Mentionable]:
+        prefix = factories[:infix_position]
+        postfix = factories[infix_position + 1:]
+        prefix_size = self._static_size(*prefix)
+        postfix_size = self._static_size(*postfix)
+        postfix_start = len(self.source) - postfix_size
+        yield from self.sub(None, prefix_size)._static_simple(*prefix)
+        yield self.sub(prefix_size, postfix_start).as_mentionable(factories[infix_position])
+        yield from self.sub(postfix_start, None)._static_simple(*postfix)
+
+    def _static(self, *factories: RainbowFactory) -> Iterable[Mentionable]:
+        infix_position: Optional[int] = None
+        for index, factory in enumerate(factories):
+            size: Optional[int] = Inlining.factory_size(factory)
+            if size is None:
+                if infix_position is None:
+                    infix_position = index
+                else:
+                    raise ValueError('static auto parse does not allow uninlineable values (two or more unsized)')
+        if infix_position is None:
+            yield from self._static_simple(*factories)
+        else:
+            yield from self._static_separate(infix_position, *factories)
+
+    @overload
+    def static(
+            self, t0: RainbowFactory[T0]
+    ) -> tuple[T0]:
+        ...
+
+    @overload
+    def static(
+            self, t0: RainbowFactory[T0], t1: RainbowFactory[T1]
+    ) -> tuple[T0, T1]:
+        ...
+
+    @overload
+    def static(
+            self, t0: RainbowFactory[T0], t1: RainbowFactory[T1], t2: RainbowFactory[T2]
+    ) -> tuple[T0, T1, T2]:
+        ...
+
+    @overload
+    def static(
+            self, t0: RainbowFactory[T0], t1: RainbowFactory[T1], t2: RainbowFactory[T2], t3: RainbowFactory[T3]
+    ) -> tuple[T0, T1, T2, T3]:
+        ...
+
+    @overload
+    def static(
+            self, t0: RainbowFactory[T0], t1: RainbowFactory[T1], t2: RainbowFactory[T2], t3: RainbowFactory[T3],
+            t4: RainbowFactory[T4]
+    ) -> tuple[T0, T1, T2, T3, T4]:
+        ...
+
+    def static(self, *factories: RainbowFactory) -> tuple[Mentionable, ...]:
+        return tuple(self._static(*factories))
+
+
 class IAutoFactory(Inlining[IAuto], Generic[_IAuto]):
     def __init__(
             self,
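Note: the new Auto helper bundles the source bytes with their HashResolver and acts as a byte cursor: entering `with auto:` resets an index, next(size) slices off a fixed-size sub-buffer, and __exit__ checks that the whole buffer was consumed. A minimal usage sketch, with hypothetical fixed-size factories:

    # element_factory and value_factory are hypothetical RainbowFactory instances
    auto = Auto(source, resolver)
    with auto:
        point = auto.next(HashPoint.HASH_LENGTH).as_hashpoint(element_factory)
        value = auto.next(Inlining.factory_size(value_factory)).as_mentionable(value_factory)
    # any leftover bytes would fail the assert_eq in __exit__

    # or let static() split the buffer itself (at most one factory may be unsized)
    a, b = Auto(source, resolver).static(element_factory, value_factory)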
@@ -221,13 +331,12 @@ class IAutoFactory(Inlining[IAuto], Generic[_IAuto]):
         assert isinstance(index, int)
         assert isinstance(factory, RainbowFactory)

-        def wrap(source: bytes, resolver: HashResolver, inlined: _IList, uninlined: _UList) -> None:
-            assert isinstance(source, bytes)
-            assert isinstance(resolver, HashResolver)
+        def wrap(auto: Auto, inlined: _IList, uninlined: _UList) -> None:
+            assert isinstance(auto, Auto)
             assert isinstance(inlined, list)
             assert isinstance(uninlined, list)
-            assert_eq(len(source), HashPoint.HASH_LENGTH)
-            uninlined.append((index, ResolverMetaOrigin(resolver).hash_point(factory, source)))
+            assert_eq(len(auto.source), HashPoint.HASH_LENGTH)
+            uninlined.append((index, auto.as_hashpoint(factory)))

         return index, wrap, HashPoint.HASH_LENGTH

@@ -237,13 +346,12 @@ class IAutoFactory(Inlining[IAuto], Generic[_IAuto]):
         assert isinstance(factory, RainbowFactory)
         assert isinstance(size, int)

-        def wrap(source: bytes, resolver: HashResolver, inlined: _IList, uninlined: _UList) -> None:
-            assert isinstance(source, bytes)
-            assert isinstance(resolver, HashResolver)
+        def wrap(auto: Auto, inlined: _IList, uninlined: _UList) -> None:
+            assert isinstance(auto, Auto)
             assert isinstance(inlined, list)
             assert isinstance(uninlined, list)
-            assert_eq(len(source), size)
-            inlined.append((index, factory.from_bytes(source, resolver)))
+            assert_eq(len(auto.source), size)
+            inlined.append((index, auto.as_mentionable(factory)))

         return index, wrap, size

@@ -269,39 +377,35 @@ class IAutoFactory(Inlining[IAuto], Generic[_IAuto]):

     @classmethod
     def _parse_affix(
-            cls, source: bytes, resolver: HashResolver, affix: _SList
+            cls, auto: Auto, affix: _SList
     ) -> Iterable[tuple[int, Mentionable]]:
-        assert isinstance(source, bytes)
-        assert isinstance(resolver, HashResolver)
+        assert isinstance(auto, Auto)
         assert isinstance(affix, list)
-        reader = BytesIO(source)
-        for index, factory, size in affix:
-            yield index, factory.from_bytes(reader.read(size), resolver)
-        assert not reader.read()
+        with auto:
+            for index, factory, size in affix:
+                yield index, auto.next(size).as_mentionable(factory)

-    def _parse_infix(self, source: bytes, resolver: HashResolver) -> Iterable[tuple[int, Mentionable]]:
-        assert isinstance(source, bytes)
-        assert isinstance(resolver, HashResolver)
-        yield from self._parse_affix(source[:self.prefix_size], resolver, self.prefix)
-        postfix_start = len(source) - self.postfix_size
-        yield self.infix_index, self.infix_factory.from_bytes(source[self.prefix_size:postfix_start], resolver)
-        yield from self._parse_affix(source[postfix_start:], resolver, self.postfix)
+    def _parse_infix(self, auto: Auto) -> Iterable[tuple[int, Mentionable]]:
+        assert isinstance(auto, Auto)
+        yield from self._parse_affix(auto.sub(None, self.prefix_size), self.prefix)
+        postfix_start = len(auto.source) - self.postfix_size
+        yield self.infix_index, auto.sub(self.prefix_size, postfix_start).as_mentionable(self.infix_factory)
+        yield from self._parse_affix(auto.sub(postfix_start, None), self.postfix)

-    def _parse_merged(self, source: bytes, resolver: HashResolver) -> _IAuto:
-        assert isinstance(source, bytes)
-        assert isinstance(resolver, HashResolver)
-        reader = BytesIO(source)
+    def _parse_merged(self, auto: Auto) -> _IAuto:
+        assert isinstance(auto, Auto)
         inlined: _IList = []
         uninlined: _UList = []
-        for method, size in self.merged:
-            method(reader.read(size), resolver, inlined, uninlined)
-        assert not reader.read()
+        with auto:
+            for method, size in self.merged:
+                method(auto.next(size), inlined, uninlined)
         return self.typed(inlined, uninlined)

     def from_bytes(self, source: bytes, resolver: HashResolver) -> _IAuto:
         assert isinstance(source, bytes)
         assert isinstance(resolver, HashResolver)
+        auto = Auto(source, resolver)
         if self.infix:
-            return self.typed(list(self._parse_infix(source, resolver)), [])
+            return self.typed(list(self._parse_infix(auto)), [])
         else:
-            return self._parse_merged(source, resolver)
+            return self._parse_merged(auto)
rainbowadn/inlining/iref.py (new file)
@@ -0,0 +1,38 @@
+from typing import Generic, Iterable, Optional, TypeVar
+
+from rainbowadn.core import *
+from .inlining import *
+
+__all__ = ('IRef',)
+
+TRef = TypeVar('TRef')
+
+
+class IRef(RecursiveMentionable, Generic[TRef]):
+    def points(self) -> Iterable[HashPoint]:
+        return [self.hashpoint]
+
+    def __bytes__(self):
+        return bytes(self.hashpoint)
+
+    def __factory__(self) -> RainbowFactory['IRef[TRef]']:
+        return IReff(self.hashpoint.factory)
+
+    def __init__(self, hashpoint: HashPoint[TRef]):
+        assert isinstance(hashpoint, HashPoint)
+        self.hashpoint = hashpoint
+
+
+class IReff(Inlining[IRef[TRef]], Generic[TRef]):
+    def __init__(self, factory: RainbowFactory[TRef]):
+        assert isinstance(factory, RainbowFactory)
+        self.factory = factory
+
+    def size(self) -> Optional[int]:
+        return HashPoint.HASH_LENGTH
+
+    def from_bytes(self, source: bytes, resolver: HashResolver) -> IRef[TRef]:
+        assert_eq(len(source), HashPoint.HASH_LENGTH)
+        return IRef(
+            ResolverMetaOrigin(resolver).hash_point(self.factory, source)
+        )
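Note: IRef wraps a single HashPoint so a reference can be stored inline as a fixed-size field (always HashPoint.HASH_LENGTH bytes); IReff is the corresponding Inlining factory. A minimal sketch, with a hypothetical element factory:

    ref_factory = IReff(element_factory)           # element_factory: hypothetical RainbowFactory
    assert ref_factory.size() == HashPoint.HASH_LENGTH
    ref = ref_factory.from_bytes(hash_bytes, resolver)
    element = await ref.hashpoint.resolve()        # resolve the referenced value on demand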
@@ -10,18 +10,18 @@ class ClassReport:
         self.report: set[tuple[str, bytes, bytes]] = set()

     @classmethod
-    def _tuple(cls, mentionable: Mentionable) -> tuple[str, bytes, bytes]:
+    def key(cls, mentionable: Mentionable) -> tuple[str, bytes, bytes]:
         return type(mentionable).__name__, bytes(HashPoint.of(mentionable)), bytes(mentionable)

     def _cointains(self, mentionable: Mentionable) -> bool:
-        return self._tuple(mentionable) in self.report
+        return self.key(mentionable) in self.report

     async def _recurse(self, recursive: RecursiveMentionable):
         for mentioned in await gather(*map(HashPoint.resolve, recursive.points())):
             await self.walk(mentioned)

     def _contain(self, mentionable: Mentionable) -> None:
-        self.report.add(self._tuple(mentionable))
+        self.report.add(self.key(mentionable))

     async def _save(self, mentionable: Mentionable):
         self._contain(mentionable)
@@ -33,8 +33,12 @@ class ClassReport:
             return
         await self._save(mentionable)

+    @classmethod
+    def format_key(cls, key: tuple[str, bytes, bytes]) -> str:
+        return f'{key[0]:<32}:{key[1].hex()}:{key[2].hex()}'
+
     def format(self) -> str:
         s = StringIO()
-        for type_, point, bytes_ in sorted(self.report):
-            s.write(f'{type_:<32}:{point.hex()}:{bytes_.hex()}\n')
+        for key in sorted(self.report):
+            s.write(f'{self.format_key(key)}\n')
         return s.getvalue()
@@ -69,6 +69,10 @@ class NullableReference(RecursiveMentionable, Generic[Referenced]):
         assert not self.null()
         return await self.hashpoint().resolve()

+    @classmethod
+    def f(cls, factory: RainbowFactory[Referenced]) -> RainbowFactory['NullableReference[Referenced]']:
+        return NullableReferenceFactory(factory)
+

 class NullableReferenceFactory(Inlining[NullableReference[Referenced]], Generic[Referenced]):
     def __init__(self, factory: RainbowFactory[Referenced]):
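Note: NullableReference.f is just a shorthand for constructing NullableReferenceFactory; the earlier hunks use it to drop explicit factory construction (and the now-removed loose()) at call sites:

    reference_factory = NullableReference.f(self)      # equivalent to NullableReferenceFactory(self)
    reference = reference_factory.from_bytes(source[:HashPoint.HASH_LENGTH], resolver)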
@@ -87,6 +91,3 @@ class NullableReferenceFactory(Inlining[NullableReference[Referenced]], Generic[Referenced]):
         return NullableReference.of(
             ResolverMetaOrigin(resolver).hash_point(self.factory, source)
         )
-
-    def loose(self) -> Inlining[NullableReference[Referenced]]:
-        return self
@@ -5,5 +5,4 @@ __all__ = ('default_resolver',)

 def default_resolver():
     dr = DictResolver()
-    # dr = DelayedResolver(dr, lambda: 0.000)
     return dr
@@ -3,13 +3,16 @@ import os
 import random
 import shutil
 import time
+from io import StringIO
 from pathlib import Path

 from rainbowadn.core import *
 from rainbowadn.instrument import *
 from rainbowadn.testing.resolvers import *

-__all__ = ('get_dr', 'target_str', 'jsonify', 'get_fn', 'jsonify_list', 'dump', 'copy', 'DeintrumentationSize',)
+__all__ = (
+    'get_dr', 'target_str', 'jsonify', 'get_fn', 'jsonify_list', 'dump', 'copy', 'DeintrumentationSize', 'Resolution'
+)


 def get_dr(mean_delay: float, caching: bool) -> ExtendableResolver:
@@ -77,3 +80,23 @@ class DeintrumentationSize(Instrumentation):
                 len(self.deinstrumentation)
             )
         return method(*args, **kwargs)
+
+
+class Resolution(Instrumentation):
+    def __init__(self):
+        self.report: dict[tuple[str, bytes, bytes], int] = {}
+        super().__init__(HashPoint, 'resolve')
+
+    def increment(self, key: tuple[str, bytes, bytes]):
+        self.report[key] = self.report.get(key, 0) + 1
+
+    async def instrument(self, method, *args, **kwargs):
+        result: Mentionable = await method(*args, **kwargs)
+        self.increment(ClassReport.key(result))
+        return result
+
+    def format(self) -> str:
+        s = StringIO()
+        for key, count in sorted(self.report.items(), key=lambda t: (-t[1], t[0])):
+            s.write(f'{count: 6d}:{ClassReport.format_key(key)}\n')
+        return s.getvalue()
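Note: Resolution instruments HashPoint.resolve and counts how many times each distinct value (keyed by ClassReport.key) gets resolved; format() emits the counts sorted most-frequent first. A minimal sketch of collecting a report, assuming Instrumentation behaves as the context manager it is used as elsewhere in this commit:

    with Resolution() as resolution:
        await _process(bank)               # any workload that resolves HashPoints
    print(resolution.format())             # lines of 'count:TypeName:hash:bytes'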
@@ -118,6 +118,14 @@ async def _report(bank: BankBlock):
     print('reported')


+async def _preprocess(bank: BankBlock):
+    with Resolution() as resolution:
+        await _process(bank)
+    with open('trace/latest-resolution.txt', 'w') as file:
+        file.write(resolution.format())
+    print('preprocessed')
+
+
 async def _trace(params):
     set_gather_linear()
     bank = await _generate(
bank = await _generate(
|
bank = await _generate(
|
||||||
@ -125,8 +133,10 @@ async def _trace(params):
|
|||||||
*params['subjects'],
|
*params['subjects'],
|
||||||
*params['transactions'],
|
*params['transactions'],
|
||||||
)
|
)
|
||||||
await _report(bank)
|
if params['meta']:
|
||||||
await _process(bank)
|
await _report(bank)
|
||||||
|
await _process(bank)
|
||||||
|
await _preprocess(bank)
|
||||||
bank = await _migrate(bank, params)
|
bank = await _migrate(bank, params)
|
||||||
set_gather_asyncio()
|
set_gather_asyncio()
|
||||||
with DeintrumentationSize(Instrumentation, 'deinstrument'):
|
with DeintrumentationSize(Instrumentation, 'deinstrument'):
|
||||||
@@ -147,8 +157,10 @@ async def trace(params):
     print('plotted')


-preset_long = dict(blocks=64, subjects=(4, 8), transactions=(8, 16), caching=True, delay=.5)
-preset_short = dict(blocks=16, subjects=(4, 8), transactions=(8, 16), caching=True, delay=.5)
+preset_default = dict(caching=True, delay=.5, meta=False, subjects=(4, 8), transactions=(8, 16))
+preset_extra = preset_default | dict(blocks=256)
+preset_long = preset_default | dict(blocks=64)
+preset_short = preset_default | dict(blocks=16)

 if __name__ == '__main__':
     random.seed(659918)
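Note: the presets are now derived from a shared preset_default via the dict union operator (Python 3.9+), so the common knobs are defined once and each preset only overrides blocks:

    preset_long = preset_default | dict(blocks=64)     # right-hand side wins on key collisions
    assert preset_long['blocks'] == 64 and preset_long['meta'] is False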
|
Loading…
Reference in New Issue
Block a user