
Commit a79c254
Revert "Changed Union to | in typehints."
This reverts commit e131ccb.
mrucker committed Aug 6, 2023
1 parent af30f8e commit a79c254
Showing 28 changed files with 205 additions and 204 deletions.
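For context (the commit message itself gives no rationale): the X | Y union syntax from PEP 604 is only available at runtime on Python 3.10+, so the spellings reverted below would break module imports on older interpreters. In particular, a generic base class such as Filter[str | Exception, str] is evaluated when the class is created and raises a TypeError on Python 3.8/3.9, even under "from __future__ import annotations". A minimal sketch of the two equivalent spellings (the function names are illustrative, not from coba):

    from typing import Union

    def parse(value: Union[str, bytes]) -> Union[int, float]:
        # Spelling restored by this revert; valid on every supported Python 3 version.
        ...

    def parse_new(value: str | bytes) -> int | float:
        # PEP 604 spelling being reverted; requires Python 3.10+ at runtime.
        ...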
16 changes: 8 additions & 8 deletions coba/context/cachers.py
@@ -8,7 +8,7 @@
from collections.abc import Iterator
from collections import defaultdict
from abc import abstractmethod, ABC
-from typing import TypeVar, Iterable, Optional, Callable, Generic, Sequence, ContextManager, Dict
+from typing import Union, Dict, TypeVar, Iterable, Optional, Callable, Generic, Sequence, ContextManager

from coba.exceptions import CobaException

@@ -33,7 +33,7 @@ def rmv(self, key: _K) -> None:
...

@abstractmethod
-def get_set(self, key: _K, getter: Callable[[], _V]|_V) -> ContextManager[_V]:
+def get_set(self, key: _K, getter: Union[Callable[[], _V],_V]) -> ContextManager[_V]:
"""Get a key from the cache.
If the key is not in the cache put it first using getter.
@@ -57,7 +57,7 @@ def __contains__(self, key: _K) -> bool:
def rmv(self, key: _K):
pass

-def get_set(self, key: _K, getter: Callable[[], _V]|_V) -> ContextManager[_V]:
+def get_set(self, key: _K, getter: Union[Callable[[], _V],_V]) -> ContextManager[_V]:
return nullcontext(getter())

class MemoryCacher(Cacher[_K, _V]):
@@ -74,7 +74,7 @@ def rmv(self, key: _K) -> None:
if key in self:
del self._cache[key]

-def get_set(self, key: _K, getter: Callable[[],_V]|_V) -> ContextManager[_V]:
+def get_set(self, key: _K, getter: Union[Callable[[], _V],_V]) -> ContextManager[_V]:
if key not in self:
value = getter() if callable(getter) else getter
value = list(value) if isinstance(value,Iterator) else value
@@ -88,7 +88,7 @@ class DiskCacher(Cacher[str, Iterable[str]]):
The DiskCacher compresses all values before writing to conserve disk space.
"""

-def __init__(self, cache_dir: str|Path|None = None) -> None:
+def __init__(self, cache_dir: Union[str, Path] = None) -> None:
"""Instantiate a DiskCacher.
Args:
@@ -102,7 +102,7 @@ def cache_directory(self) -> Optional[str]:
return str(self._cache_dir) if self._cache_dir is not None else None

@cache_directory.setter
-def cache_directory(self,value:Path|str|None) -> None:
+def cache_directory(self,value:Union[Path,str,None]) -> None:
self._cache_dir = value if isinstance(value, Path) else Path(value).expanduser() if value else None

def __contains__(self, key: str) -> bool:
@@ -111,7 +111,7 @@ def __contains__(self, key: str) -> bool:
def rmv(self, key: str) -> None:
if self._cache_path(key).exists(): self._cache_path(key).unlink()

-def get_set(self, key: str, getter: Callable[[], Iterable[str]]|Iterable[str]) -> ContextManager[Iterable[str]]:
+def get_set(self, key: str, getter: Union[Callable[[], Iterable[str]],Iterable[str]]) -> ContextManager[Iterable[str]]:

if self._cache_dir is None:
return nullcontext(getter())
@@ -181,7 +181,7 @@ def rmv(self, key: _K):
if lock == 'write': self._release_write_lock(key)
raise

-def get_set(self, key: _K, getter: Callable[[],_V]|_V) -> ContextManager[_V]:
+def get_set(self, key: _K, getter: Union[Callable[[],_V],_V]) -> ContextManager[_V]:

try:
self._acquire_read_lock(key)
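For orientation, the get_set signatures restored above accept either a callable that produces a value or the value itself. A minimal usage sketch (it assumes MemoryCacher() takes no constructor arguments; its __init__ is not shown in this diff):

    from coba.context.cachers import MemoryCacher

    cacher = MemoryCacher()

    # A callable getter is only invoked when the key is missing from the cache.
    with cacher.get_set("alpha", lambda: [1, 2, 3]) as value:
        print(value)  # expected to yield [1, 2, 3]

    # A plain value is also accepted, per the restored Union[Callable[[], _V], _V] annotation.
    with cacher.get_set("beta", "some value") as value:
        print(value)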
6 changes: 3 additions & 3 deletions coba/context/core.py
@@ -4,7 +4,7 @@
import traceback

from pathlib import Path
-from typing import Iterable, Dict, Any, Sequence, Literal
+from typing import Iterable, Dict, Any, Sequence, Union, Literal

from coba.exceptions import CobaException
from coba.registry import JsonMakerV1, CobaRegistry
@@ -158,7 +158,7 @@ def cacher(cls) -> Cacher[str,Iterable[bytes]]:
return cls._cacher

@cacher.setter
-def cacher(cls, value: Cacher|str) -> None:
+def cacher(cls, value: Union[Cacher,str]) -> None:
cls._cacher = value

@property
@@ -195,7 +195,7 @@ def search_paths(cls) -> Sequence[Path]:
return cls._search_paths

@search_paths.setter
-def search_paths(cls, value:Sequence[str|Path]) -> None:
+def search_paths(cls, value:Sequence[Union[str,Path]]) -> None:
cls._search_paths = [ Path(path) if isinstance(path,str) else path for path in value ]

@property
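The two setters touched here are the usual configuration points for coba's global context. A short sketch (the import path matches the coba.context import visible later in this diff; the paths are illustrative):

    from pathlib import Path
    from coba.context import CobaContext, DiskCacher

    # cacher accepts a Cacher instance (or, per the restored annotation, a str).
    CobaContext.cacher = DiskCacher("~/.coba/cache")

    # search_paths accepts a mix of str and Path; strings are converted to Path internally.
    CobaContext.search_paths = ["./configs", Path.home() / ".coba"]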
16 changes: 8 additions & 8 deletions coba/context/loggers.py
@@ -5,7 +5,7 @@
from multiprocessing import current_process
from contextlib import contextmanager, nullcontext
from datetime import datetime
-from typing import ContextManager, Iterator, Sequence
+from typing import ContextManager, Iterator, Sequence, Union
from copy import copy

from coba.pipes import Pipes, Filter, Sink, NullSink, ConsoleSink, Identity
@@ -26,7 +26,7 @@ def sink(self, sink: Sink[str]):
...

@abstractmethod
-def log(self, message: str|Exception) -> 'ContextManager[Logger]':
+def log(self, message: Union[str,Exception]) -> 'ContextManager[Logger]':
"""Log a message or exception to the sink.
Args:
@@ -68,7 +68,7 @@ def sink(self) -> Sink[str]:
def sink(self, sink: Sink[str]):
self._sink = sink

-def log(self, message: str|Exception) -> 'ContextManager[Logger]':
+def log(self, message: Union[str,Exception]) -> 'ContextManager[Logger]':
return nullcontext(self)

def time(self, message: str) -> 'ContextManager[Logger]':
@@ -198,7 +198,7 @@ def sink(self) -> Sink[str]:
def sink(self, sink: Sink[str]):
self._sink = sink

-def log(self, message: str|Exception) -> 'ContextManager[Logger]':
+def log(self, message: Union[str,Exception]) -> 'ContextManager[Logger]':
if self._messages:
self._messages.append(self._level_message(message))
else:
@@ -229,7 +229,7 @@ def sink(self) -> Sink[str]:
def sink(self, sink: Sink[str]):
self._sink = sink

-def log(self, message: str|Exception) -> 'ContextManager[Logger]':
+def log(self, message: Union[str,Exception]) -> 'ContextManager[Logger]':
if isinstance(message,Exception):
self._sink.write(self._filter.filter(message))
return nullcontext(self)
@@ -264,7 +264,7 @@ def sink(self, sink: Sink[str]):
self._original_logger.sink = sink
self._copy_logger.sink = Pipes.join(*self._post_decorators, sink)

-def log(self, message: str|Exception) -> 'ContextManager[Logger]':
+def log(self, message: Union[str,Exception]) -> 'ContextManager[Logger]':
return self._copy_logger.log(self._pre_decorator.filter(message))

def time(self, message: str) -> 'ContextManager[Logger]':
@@ -288,10 +288,10 @@ def filter(self, log: str) -> str:
def _now(self)-> datetime:
return datetime.now()

-class ExceptLog(Filter[str|Exception,str]):
+class ExceptLog(Filter[Union[str,Exception],str]):
"""A Log decorator that turns exceptions into messages."""

-def filter(self, log: str|Exception) -> str:
+def filter(self, log: Union[str,Exception]) -> str:
if isinstance(log, str):
return log
elif isinstance(log, CobaException):
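Every log signature above now takes Union[str, Exception], so callers can pass either a message or a caught exception (ExceptLog turns the latter into a message). A sketch of typical use; it assumes the active logger is exposed as CobaContext.logger, which this diff does not show:

    from coba.context import CobaContext

    CobaContext.logger.log("evaluation started")       # a plain string message

    try:
        1 / 0
    except Exception as e:
        CobaContext.logger.log(e)                      # an Exception is accepted as well

    with CobaContext.logger.time("scoring learners"):  # time() wraps a block, presumably logging its duration
        pass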
14 changes: 7 additions & 7 deletions coba/encodings.py
@@ -7,7 +7,7 @@
from abc import ABC, abstractmethod
from collections import Counter, OrderedDict, defaultdict
from itertools import count, accumulate, chain
-from typing import Iterator, Sequence, Generic, TypeVar, Any, Tuple, Mapping
+from typing import Iterator, Sequence, Generic, TypeVar, Any, Tuple, Union, Mapping

from coba.exceptions import CobaException
from coba.primitives import Sparse, Dense, Categorical
@@ -320,7 +320,7 @@ class CobaJsonDecoder(json.JSONDecoder):

class InteractionsEncoder:

-def __init__(self, interactions: Sequence[str|float]) -> None:
+def __init__(self, interactions: Sequence[Union[str,float]]) -> None:
str_interactions = [i for i in interactions if isinstance(i,str) ]
num_interactions = [i for i in interactions if isinstance(i,Number)]

@@ -330,7 +330,7 @@ def __init__(self, interactions: Sequence[str|float]) -> None:
self._cross_pows = OrderedDict(zip(interactions,map(OrderedDict,map(Counter,str_interactions))))
self._ns_max_pow = { n:int(max(p.get(n,0) for p in self._cross_pows.values())) for n in set(''.join(str_interactions)) }

-def encode(self, **ns_raw_values: str|float|Sequence[str|float]|Mapping[str|int,str|float]) -> Sequence[float]|Mapping[str,float]:
+def encode(self, **ns_raw_values: Union[str, float, Sequence[Union[str,float]], Mapping[Union[str,int],Union[str,float]]]) -> Union[Sequence[float], Mapping[str,float]]:

self.n+= 1

@@ -345,13 +345,13 @@ def encode(self, **ns_raw_values: str|float|Sequence[str|float]|Mapping[str|int,

is_sparse = any(is_sparse_type(v) or is_sparse_sequ(v) for v in ns_raw_values.values())

-def make_dict(v) -> Mapping[str,str|float]:
+def make_dict(v) -> Mapping[str,Union[str,float]]:
return v if is_map(v) else dict(zip(map(str,count()),v)) if is_seq(v) else { "0":v }

-def make_list(v) -> Sequence[str|float]:
+def make_list(v) -> Sequence[Union[str,float]]:
return v if is_seq(v) else [v]

-def handle_str(v: Mapping[str,str|float]) -> Mapping[str,float]:
+def handle_str(v: Mapping[str,Union[str,float]]) -> Mapping[str,float]:
return { (f"{x}{y}" if is_str(y) else x):(1 if is_str(y) else y) for x,y in v.items() }

start = time.time()
@@ -400,7 +400,7 @@ def handle_str(v: Mapping[str,str|float]) -> Mapping[str,float]:

return encoded

-def _pows(self, values: Sequence[str|float], degree):
+def _pows(self, values: Sequence[Union[str,float]], degree):
#WARNING: This function has been extremely optimized. Please baseline performance before and after making any changes.
#WARNING: You can find three existing performance tests in test_performance.

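For reference, InteractionsEncoder is constructed from namespace interaction terms (e.g. "x", "a", "xa"), and encode() receives each namespace's raw values as keyword arguments, per the restored signature above. A rough sketch; the inputs are illustrative and the exact ordering of the returned features is not guaranteed here:

    from coba.encodings import InteractionsEncoder

    # "xa" requests the cross between namespace x and namespace a.
    encoder = InteractionsEncoder(["x", "a", "xa"])

    features = encoder.encode(x=[1, 2], a=[3])  # dense inputs -> Sequence[float]
    print(features)                             # the x terms, the a term, and the x*a crosses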
34 changes: 17 additions & 17 deletions coba/environments/core.py
@@ -2,7 +2,7 @@

from zipfile import ZipFile, BadZipFile
from pathlib import Path
-from typing import Union, Sequence, overload, Iterable, Iterator, Any, Optional, Tuple, Callable, Mapping, Type, Literal
+from typing import Sequence, overload, Union, Iterable, Iterator, Any, Optional, Tuple, Callable, Mapping, Type, Literal

from coba import pipes
from coba.context import CobaContext, DiskCacher, DecoratedLogger, ExceptLog, NameLog, StampLog
@@ -33,7 +33,7 @@ class Environments(collections.abc.Sequence, Sequence[Environment]):
"""A friendly wrapper around commonly used environment functionality."""

@staticmethod
-def cache_dir(path:str|Path) -> Type['Environments']:
+def cache_dir(path:Union[str,Path]) -> Type['Environments']:
CobaContext.cacher = DiskCacher(path)
return Environments

@@ -87,7 +87,7 @@ def from_linear_synthetic(
n_context_features: int = 5,
n_action_features: int = 5,
reward_features: Sequence[str] = ["a","xa"],
-seed: int|Sequence[int] = 1) -> 'Environments':
+seed: Union[int,Sequence[int]] = 1) -> 'Environments':
"""A synthetic simulation whose rewards are linear with respect to the given reward features.
The simulation's rewards are determined via a linear function with respect to the given reward features. When
@@ -129,7 +129,7 @@ def from_kernel_synthetic(
kernel: Literal['linear','polynomial','exponential','gaussian'] = 'gaussian',
degree: int = 3,
gamma: float = 1,
-seed: int|Sequence[int] = 1) -> 'Environments':
+seed: Union[int,Sequence[int]] = 1) -> 'Environments':
"""A synthetic simulation whose reward function is created from kernel basis functions."""

seed = [seed] if not isinstance(seed,collections.abc.Sequence) else seed
@@ -157,7 +157,7 @@ def from_mlp_synthetic(

@overload
@staticmethod
-def from_openml(data_id: int|Sequence[int],
+def from_openml(data_id: Union[int,Sequence[int]],
drop_missing: bool = True,
take: int = None,
*,
@@ -167,7 +167,7 @@ def from_openml(data_id: int|Sequence[int],

@overload
@staticmethod
-def from_openml(*,task_id: int|Sequence[int],
+def from_openml(*,task_id: Union[int,Sequence[int]],
drop_missing: bool = True,
take: int = None,
target:str = None,
@@ -195,7 +195,7 @@ def from_openml(*args,**kwargs) -> 'Environments':
@staticmethod
def from_supervised(
source: Source,
-label_col: int|str = None,
+label_col: Union[int,str] = None,
label_type: Literal["C","R"] = "C",
take: int = None) -> 'Environments':
"""Create a SimulatedEnvironment from a supervised dataset"""
@@ -248,7 +248,7 @@ def from_lambda(*args,**kwargs) -> 'Environments':
def from_dataframe(df) -> 'Environments':
return Environments(Pipes.join(DataFrameSource(df), MappingToInteraction()))

-def __init__(self, *environments: Environment|Sequence[Environment]):
+def __init__(self, *environments: Union[Environment, Sequence[Environment]]):
"""Instantiate an Environments class.
Args:
@@ -309,7 +309,7 @@ def shuffle(self, *args,**kwargs) -> 'Environments':

return Environments(ordered)

-def sort(self, *keys: str|int|Sequence[str|int]) -> 'Environments':
+def sort(self, *keys: Union[str,int,Sequence[Union[str,int]]]) -> 'Environments':
"""Sort Environment interactions according to the context values indicated by keys."""
return self.filter(Sort(*keys))

@@ -334,22 +334,22 @@ def slice(self, start: Optional[int], stop: Optional[int]=None, step:int = 1) ->
"""Take a slice of interactions from an Environment."""
return self.filter(Slice(start,stop,step))

-def reservoir(self, n_interactions: int, seeds: int|Sequence[int]=1) -> 'Environments':
+def reservoir(self, n_interactions: int, seeds: Union[int,Sequence[int]]=1) -> 'Environments':
"""Take a random fixed number of interactions from the Environments."""
if isinstance(seeds,int): seeds = [seeds]
return self.filter([Reservoir(n_interactions,seed=seed) for seed in seeds])

def scale(self,
-shift: float|Literal["min","mean","med"] = "min",
-scale: float|Literal["minmax","std","iqr","maxabs"] = "minmax",
-targets: Literal["context"] | Sequence[Literal["context"]] = "context",
+shift: Union[float,Literal["min","mean","med"]] = "min",
+scale: Union[float,Literal["minmax","std","iqr","maxabs"]] = "minmax",
+targets: Union[Literal["context","ope_rewards","argmax"], Sequence[Literal["context","ope_rewards","argmax"]]] = "context",
using: Optional[int] = None) -> 'Environments':
"""Apply an affine shift and scaling factor to precondition environments."""
if isinstance(targets,str): targets = [targets]
return self.filter(Pipes.join(*[Scale(shift, scale, t, using) for t in targets]))

def impute(self,
-stats: Literal["mean","median","mode"]|Sequence[Literal["mean","median","mode"]] = "mean",
+stats: Union[Literal["mean","median","mode"],Sequence[Literal["mean","median","mode"]]] = "mean",
indicator:bool = True,
using: Optional[int] = None) -> 'Environments':
"""Impute missing values with a feature statistic using a given number of interactions."""
@@ -359,7 +359,7 @@ def impute(self,
envs = self.filter(Impute(stat, indicator, using))
return envs

-def where(self,*,n_interactions: int|Tuple[Optional[int],Optional[int]] = None) -> 'Environments':
+def where(self,*,n_interactions: Union[int,Tuple[Optional[int],Optional[int]]] = None) -> 'Environments':
"""Only include environments which satisify the given requirements."""
return self.filter(Where(n_interactions=n_interactions))

@@ -410,7 +410,7 @@ def chunk(self, cache:bool = True) -> 'Environments':
envs = Environments([Pipes.join(env, Chunk()) for env in self])
return envs.cache() if cache else envs

-def logged(self, learners: Learner|Sequence[Learner], seed:Optional[float] = 1.23) -> 'Environments':
+def logged(self, learners: Union[Learner,Sequence[Learner]], seed:Optional[float] = 1.23) -> 'Environments':
"""Create a logged environment using the given learner for the logging policy."""
if not isinstance(learners, collections.abc.Sequence): learners = [learners]
return self.filter(BatchSafe(Finalize())).filter([Logged(learner, seed) for learner in learners ])
@@ -467,7 +467,7 @@ def cache(self) -> 'Environments':
"""Create a cache point in the environments so that earlier steps in the pipeline can be re-used in several pipes."""
return Environments([Pipes.join(env, Cache(25)) for env in self])

-def filter(self, filter: EnvironmentFilter|Sequence[EnvironmentFilter]) -> 'Environments':
+def filter(self, filter: Union[EnvironmentFilter,Sequence[EnvironmentFilter]]) -> 'Environments':
"""Apply filters to each environment currently in Environments."""
filters = filter if isinstance(filter, collections.abc.Sequence) else [filter]
return Environments([Pipes.join(e,f) for e in self._environments for f in filters])
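The methods above form Environments' fluent builder/filter API; several of the reverted annotations are parameters that accept either a single value or a sequence of values. A sketch of how the calls chain (argument values are illustrative and the import path is assumed from coba's packaging):

    from coba.environments import Environments

    envs = (Environments
            .from_openml(data_id=180, drop_missing=True)  # int or Sequence[int]
            .reservoir(1000, seeds=[1, 2, 3])             # one seed or several
            .scale(shift="mean", scale="std")             # float or Literal shift/scale
            .where(n_interactions=(500, None)))           # an int or a (min, max) tuple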
(The remaining 23 changed files are not shown here.)