remove stack + fix integer parsing

AF 2022-09-07 23:10:23 +03:00
parent 2aa07d7f10
commit 11ace6bc5b
7 changed files with 3 additions and 271 deletions

View File

@@ -12,6 +12,8 @@ class Integer(Atomic):
    @classmethod
    def _from_bytes(cls, source: bytes) -> 'Integer':
        assert isinstance(source, bytes)
        if source:
            assert source[-1] > 0
        return cls(int.from_bytes(source, 'little'))

    def __bytes__(self):

View File

@@ -1,9 +1,7 @@
__all__ = (
    'Array', 'ArrayFactory',
    'Stack', 'StackFactory',
    'TLRoot', 'TLRootFactory', 'TLRParametres',
)

from .array import Array, ArrayFactory
from .stack import Stack, StackFactory
from .treelist import TLRParametres, TLRoot, TLRootFactory

View File

@@ -1,126 +0,0 @@
from typing import AsyncIterable, Generic, Iterable, TypeVar

from rainbowadn.core import *
from rainbowadn.nullability import *

__all__ = ('Stack', 'StackFactory',)

ElementType = TypeVar('ElementType')


class Stack(RecursiveMentionable, Generic[ElementType]):
    def __factory__(self) -> RainbowFactory['Stack[ElementType]']:
        return self.factory(self.element.factory)

    @classmethod
    def factory(cls, factory: RainbowFactory[ElementType]) -> RainbowFactory['Stack[ElementType]']:
        assert isinstance(factory, RainbowFactory)
        return StackFactory(factory)

    def __init__(self, previous: NullableReference['Stack[ElementType]'], element: HashPoint[ElementType]):
        assert isinstance(previous, NullableReference)
        assert isinstance(element, HashPoint)
        self.previous = previous
        self.element = element

    def points(self) -> Iterable[HashPoint]:
        return [*self.previous.points(), self.element]

    def __bytes__(self):
        return bytes(self.previous) + bytes(self.element)

    async def str(self, tab: int) -> str:
        assert isinstance(tab, int)
        previous_str, element_str = await gather(
            self.previous.str(tab),
            hash_point_format(self.element, tab),
        )
        assert isinstance(previous_str, str)
        assert isinstance(element_str, str)
        return f'{previous_str}' \
               f'{tabulate(tab)}{element_str}'

    @classmethod
    def of(
            cls,
            factory: RainbowFactory[ElementType],
            elements: Iterable[HashPoint[ElementType]]
    ) -> NullableReference[
        'Stack[ElementType]'
    ]:
        assert isinstance(factory, RainbowFactory)
        reference: NullableReference[Stack[ElementType]] = NullableReference(Null(), cls.factory(factory))
        assert isinstance(reference, NullableReference)
        for element in elements:
            assert isinstance(element, HashPoint)
            reference = NullableReference.off(cls(reference, element))
            assert isinstance(reference, NullableReference)
        return reference

    @classmethod
    def off(
            cls,
            factory: RainbowFactory[ElementType],
            elements: Iterable[ElementType]
    ) -> NullableReference[
        'Stack[ElementType]'
    ]:
        assert isinstance(factory, RainbowFactory)
        return cls.of(factory, map(HashPoint.of, elements))

    @classmethod
    async def iter(
            cls,
            reference: NullableReference['Stack[ElementType]']
    ) -> AsyncIterable[HashPoint[ElementType]]:
        assert isinstance(reference, NullableReference)
        async for stack in cls.iter_stacks(reference):
            yield stack.element

    @classmethod
    async def iter_stacks(
            cls,
            reference: NullableReference['Stack[ElementType]']
    ) -> AsyncIterable['Stack[ElementType]']:
        assert isinstance(reference, NullableReference)
        if reference.null():
            pass
        else:
            stack: Stack[ElementType] = await reference.resolve()
            assert isinstance(stack, Stack)
            yield stack
            async for stack in cls.iter_stacks(stack.previous):
                yield stack

    @classmethod
    async def list(
            cls,
            reference: NullableReference['Stack[ElementType]']
    ) -> list[ElementType]:
        assert isinstance(reference, NullableReference)
        return list(
            await gather(
                *[element.resolve() async for element in cls.iter(reference)]
            )
        )

    @classmethod
    def unit(cls, element: HashPoint[ElementType]) -> 'Stack[ElementType]':
        return Stack(NullableReference(Null(), StackFactory(element.factory)), element)


class StackFactory(RainbowFactory[Stack[ElementType]], Generic[ElementType]):
    def __init__(self, factory: RainbowFactory[ElementType]):
        assert isinstance(factory, RainbowFactory)
        self.factory = factory

    def from_bytes(self, source: bytes, resolver: HashResolver) -> Stack[ElementType]:
        assert isinstance(source, bytes)
        assert isinstance(resolver, HashResolver)
        return Stack(
            NullableReference.f(self).from_bytes(source[:HashPoint.HASH_LENGTH], resolver),
            ResolverOrigin(self.factory, source[HashPoint.HASH_LENGTH:], resolver).hash_point()
        )

    def loose(self) -> RainbowFactory[Stack[ElementType]]:
        return self
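The file deleted above implemented Stack as a persistent, hash-linked cons list: each node stores one HashPoint element plus a nullable reference to the previous node, so Stack.of pushes elements in order and iteration walks from the most recently pushed element downwards. A plain-Python sketch of that shape (ConsStack is illustrative, not rainbowadn code):

from typing import Generic, Optional, TypeVar

T = TypeVar('T')


class ConsStack(Generic[T]):
    def __init__(self, previous: Optional['ConsStack[T]'], element: T):
        self.previous = previous
        self.element = element

    @classmethod
    def of(cls, elements: list[T]) -> Optional['ConsStack[T]']:
        # Mirrors Stack.of: each element is pushed on top of the previous
        # reference, so the last element ends up at the head.
        reference: Optional['ConsStack[T]'] = None
        for element in elements:
            reference = cls(reference, element)
        return reference

    def to_list(self) -> list[T]:
        # Mirrors Stack.list via Stack.iter_stacks: walk the links top-down.
        node: Optional['ConsStack[T]'] = self
        out: list[T] = []
        while node is not None:
            out.append(node.element)
            node = node.previous
        return out


assert ConsStack.of(['A', 'B', 'C']).to_list() == ['C', 'B', 'A']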

View File

@@ -4,8 +4,6 @@ todo: deprecate

__all__ = (
    'ListBridge',
    'StackBridge',
)

from ._listbridge import ListBridge
from ._stackbridge import StackBridge

View File

@@ -1,60 +0,0 @@
from typing import Generic, TypeVar

from rainbowadn.collection.linear import *
from rainbowadn.core import *
from rainbowadn.flow.core import *
from rainbowadn.flow.sequence import *
from ._listbridge import *

__all__ = ('StackBridge',)

ElementT = TypeVar('ElementT')
Out = TypeVar('Out')


class StackBridge(
    Reducer[SequenceDispatcher[Stack[ElementT], Out], Out],
    Generic[Out, ElementT]
):
    def __init__(self, target: HashPoint[Stack[ElementT]]):
        assert isinstance(target, HashPoint)
        self.target = target

    async def reduce(self, reduce: Reduce[SequenceDispatcher[Stack[ElementT], Out], Out]) -> Out:
        resolved: Stack[ElementT] = await self.target.resolve()
        dispatchers: list[SequenceDispatcher[Stack[ElementT], Out]] = [LastDispatcher(resolved)]
        deepest: Stack[ElementT] = resolved
        stack: Stack[ElementT]
        for stack in [x async for x in Stack.iter_stacks(resolved.previous)]:
            assert isinstance(stack, Stack)
            dispatchers.append(PairDispatcher(stack, deepest))
            deepest = stack
        dispatchers.append(FirstDispatcher(deepest))
        bridge: Reducer[SequenceDispatcher[Stack[ElementT], Out], Out] = ListBridge(dispatchers)
        assert isinstance(bridge, Reducer)
        return await bridge.reduce(reduce)

    def loose(self) -> Reducer[SequenceDispatcher[Stack[ElementT], Out], Out]:
        return self

    @classmethod
    def mapper(cls) -> Mapper[
        HashPoint[Stack[ElementT]],
        Reducer[SequenceDispatcher[Stack[ElementT], Out], Out]
    ]:
        return CallableMapper(cls)

    @classmethod
    def element_of(cls, stack: Stack[ElementT]) -> HashPoint[ElementT]:
        return stack.element

    @classmethod
    def element_mapper(cls) -> Mapper[Stack[ElementT], HashPoint[ElementT]]:
        return CallableMapper(cls.element_of)

    def over_elements(self) -> Reducer[SequenceDispatcher[HashPoint[ElementT], Out], Out]:
        cdm: Mapper[
            SequenceDispatcher[Stack[ElementT], None],
            SequenceDispatcher[HashPoint[ElementT], None]
        ] = CompositionDispatcher.mapper(self.element_mapper())
        return MapReducer(cdm, self)
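The StackBridge removed above turned a stack into a sequence of dispatch events fed to ListBridge: one LastDispatcher for the top node, one PairDispatcher per adjacent pair while walking towards the bottom, and finally a FirstDispatcher for the deepest node. A rough sketch of that event order on plain values (dispatch_events is illustrative, and the tuples stand in for the dispatcher objects):

def dispatch_events(top_down: list[str]) -> list[tuple[str, ...]]:
    # top_down holds the stack elements starting from the most recently
    # pushed one, matching the order produced by Stack.iter_stacks.
    events: list[tuple[str, ...]] = [('last', top_down[0])]
    deepest = top_down[0]
    for element in top_down[1:]:
        events.append(('pair', element, deepest))
        deepest = element
    events.append(('first', deepest))
    return events


# Stack.off(..., [A, B, C]) leaves C on top, so iteration is C, B, A.
assert dispatch_events(['C', 'B', 'A']) == [
    ('last', 'C'),
    ('pair', 'B', 'C'),
    ('pair', 'A', 'B'),
    ('first', 'A'),
]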

View File

@@ -2,100 +2,20 @@ import os
import random
import time
import unittest
from typing import Any

from nacl.signing import SigningKey

from rainbowadn.atomic import *
from rainbowadn.collection.comparison import *
from rainbowadn.collection.linear import *
from rainbowadn.collection.trees.binary import *
from rainbowadn.core import *
from rainbowadn.flow.bridge import *
from rainbowadn.flow.core import *
from rainbowadn.flow.primitive import *
from rainbowadn.flow.sequence import *
from rainbowadn.flow.stacked import *
from rainbowadn.flow13 import *
from rainbowadn.nullability import *
from rainbowadn.v13 import *


class PrintDispatch(SequenceDispatch[HashPoint, None]):
    async def on_first(self, element: HashPoint) -> None:
        print('first', await hash_point_format(element, 0))

    async def on_last(self, element: HashPoint) -> None:
        print('last', await hash_point_format(element, 0))

    async def on_pair(self, previous: HashPoint, element: HashPoint) -> None:
        print('pair', await hash_point_format(previous, 0), await hash_point_format(element, 0))

    def loose(self) -> SequenceDispatch[HashPoint, None]:
        return self


class NoneReduce(Reduce[None, None]):
    def __init__(self):
        super().__init__(None)

    async def reduce(self, out: None, element: None) -> None:
        return None

    def merge(self, left: None, right: None) -> None:
        return None


class PrintReduce(Reduce[tuple[Nullable[HashPoint], HashPoint], None]):
    def __init__(self):
        super().__init__(None)

    async def reduce(self, out: None, element: tuple[Nullable[HashPoint], HashPoint]) -> None:
        nullable_, hashpoint_ = element
        print(
            'reduce',
            '-' if nullable_.null() else await hash_point_format(nullable_.resolve(), 0),
            await hash_point_format(hashpoint_, 0)
        )
        return None

    def merge(self, left: None, right: None) -> None:
        return None

    def loose(self) -> Reduce[tuple[Nullable[HashPoint], HashPoint], None]:
        return self


class TestBridge(unittest.IsolatedAsyncioTestCase):
    @classmethod
    def element_of(cls, stack: Stack[Plain]) -> HashPoint[Plain]:
        return stack.element

    @classmethod
    def element_mapper(cls) -> Mapper[Stack[Plain], HashPoint[Plain]]:
        return CallableMapper(cls.element_of)

    @classmethod
    async def bridge(cls) -> Reducer[SequenceDispatcher[HashPoint[Plain], Any], Any]:
        hp: HashPoint[Stack[Plain]] = Stack.off(Plain.factory(), [Plain(b'A'), Plain(b'B'), Plain(b'C')]).hashpoint()
        print(await hash_point_format(hp, 0))
        bridge = StackBridge(hp).over_elements()
        return bridge

    async def test_stack_bridge(self):
        set_gather_linear()
        bridge = await self.bridge()
        assert_none_strict(
            await bridge.reduce(MapReduce(DispatchMapper(PrintDispatch().loose()).loose(), NoneReduce()).loose())
        )

    async def test_stacked(self):
        set_gather_linear()
        bridge = await self.bridge()
        assert_none_strict(
            await StackedReducer(bridge).loose().reduce(PrintReduce().loose())
        )

    async def test_iterator(self):
        set_gather_linear()
        bridge = ListBridge(list(range(13)))

View File

@@ -167,7 +167,7 @@ if __name__ == '__main__':
    try:
        asyncio.run(
            trace(
                preset_long
                preset_short
            )
        )
    except KeyboardInterrupt: