How to use the override method in ng-mocks

The snippets below are code examples that match the override keyword, collected from open-source projects on GitHub.

client.pyi

Source: client.pyi (GitHub)


from datetime import datetime, timedelta
from typing import (
    Any,
    Callable,
    Dict,
    Generic,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Sequence,
    Set,
    Text,
    Tuple,
    Type,
    TypeVar,
    Union,
    overload,
)
from typing_extensions import Literal
from .connection import ConnectionPool
from .lock import Lock
SYM_EMPTY: Any
def list_or_args(keys, args): ...
def timestamp_to_datetime(response): ...
def string_keys_to_dict(key_string, callback): ...
def dict_merge(*dicts): ...
def parse_debug_object(response): ...
def parse_object(response, infotype): ...
def parse_info(response): ...
SENTINEL_STATE_TYPES: Any
def parse_sentinel_state(item): ...
def parse_sentinel_master(response): ...
def parse_sentinel_masters(response): ...
def parse_sentinel_slaves_and_sentinels(response): ...
def parse_sentinel_get_master(response): ...
def pairs_to_dict(response): ...
def pairs_to_dict_typed(response, type_info): ...
def zset_score_pairs(response, **options): ...
def sort_return_tuples(response, **options): ...
def int_or_none(response): ...
def float_or_none(response): ...
def bool_ok(response): ...
def parse_client_list(response, **options): ...
def parse_config_get(response, **options): ...
def parse_scan(response, **options): ...
def parse_hscan(response, **options): ...
def parse_zscan(response, **options): ...
def parse_slowlog_get(response, **options): ...
_ScoreCastFuncReturn = TypeVar("_ScoreCastFuncReturn")
_Value = Union[bytes, float, int, Text]
_Key = Union[Text, bytes]
# Lib returns str or bytes depending on Python version and value of decode_responses
_StrType = TypeVar("_StrType", bound=Union[Text, bytes])
_LockType = TypeVar("_LockType")
class Redis(Generic[_StrType]):
    RESPONSE_CALLBACKS: Any
    @overload
    @classmethod
    def from_url(
        cls,
        url: Text,
        host: Optional[Text],
        port: Optional[int],
        db: Optional[int],
        password: Optional[Text],
        socket_timeout: Optional[float],
        socket_connect_timeout: Optional[float],
        socket_keepalive: Optional[bool],
        socket_keepalive_options: Optional[Mapping[str, Union[int, str]]],
        connection_pool: Optional[ConnectionPool],
        unix_socket_path: Optional[Text],
        encoding: Text,
        encoding_errors: Text,
        charset: Optional[Text],
        errors: Optional[Text],
        decode_responses: Literal[True],
        retry_on_timeout: bool = ...,
        ssl: bool = ...,
        ssl_keyfile: Optional[Text] = ...,
        ssl_certfile: Optional[Text] = ...,
        ssl_cert_reqs: Optional[Union[str, int]] = ...,
        ssl_ca_certs: Optional[Text] = ...,
        ssl_check_hostname: bool = ...,
        max_connections: Optional[int] = ...,
        single_connection_client: bool = ...,
        health_check_interval: float = ...,
        client_name: Optional[Text] = ...,
        username: Optional[Text] = ...,
    ) -> Redis[str]: ...
    @overload
    @classmethod
    def from_url(
        cls,
        url: Text,
        host: Optional[Text] = ...,
        port: Optional[int] = ...,
        db: Optional[int] = ...,
        password: Optional[Text] = ...,
        socket_timeout: Optional[float] = ...,
        socket_connect_timeout: Optional[float] = ...,
        socket_keepalive: Optional[bool] = ...,
        socket_keepalive_options: Optional[Mapping[str, Union[int, str]]] = ...,
        connection_pool: Optional[ConnectionPool] = ...,
        unix_socket_path: Optional[Text] = ...,
        encoding: Text = ...,
        encoding_errors: Text = ...,
        charset: Optional[Text] = ...,
        errors: Optional[Text] = ...,
        *,
        decode_responses: Literal[True],
        retry_on_timeout: bool = ...,
        ssl: bool = ...,
        ssl_keyfile: Optional[Text] = ...,
        ssl_certfile: Optional[Text] = ...,
        ssl_cert_reqs: Optional[Union[str, int]] = ...,
        ssl_ca_certs: Optional[Text] = ...,
        ssl_check_hostname: bool = ...,
        max_connections: Optional[int] = ...,
        single_connection_client: bool = ...,
        health_check_interval: float = ...,
        client_name: Optional[Text] = ...,
        username: Optional[Text] = ...,
    ) -> Redis[str]: ...
    @overload
    @classmethod
    def from_url(
        cls,
        url: Text,
        host: Optional[Text] = ...,
        port: Optional[int] = ...,
        db: Optional[int] = ...,
        password: Optional[Text] = ...,
        socket_timeout: Optional[float] = ...,
        socket_connect_timeout: Optional[float] = ...,
        socket_keepalive: Optional[bool] = ...,
        socket_keepalive_options: Optional[Mapping[str, Union[int, str]]] = ...,
        connection_pool: Optional[ConnectionPool] = ...,
        unix_socket_path: Optional[Text] = ...,
        encoding: Text = ...,
        encoding_errors: Text = ...,
        charset: Optional[Text] = ...,
        decode_responses: Literal[False] = ...,
        errors: Optional[Text] = ...,
        retry_on_timeout: bool = ...,
        ssl: bool = ...,
        ssl_keyfile: Optional[Text] = ...,
        ssl_certfile: Optional[Text] = ...,
        ssl_cert_reqs: Optional[Union[str, int]] = ...,
        ssl_ca_certs: Optional[Text] = ...,
        ssl_check_hostname: bool = ...,
        max_connections: Optional[int] = ...,
        single_connection_client: bool = ...,
        health_check_interval: float = ...,
        client_name: Optional[Text] = ...,
        username: Optional[Text] = ...,
    ) -> Redis[bytes]: ...
    connection_pool: Any
    response_callbacks: Any
    @overload
    def __new__(
        cls,
        host: Text,
        port: int,
        db: int,
        password: Optional[Text],
        socket_timeout: Optional[float],
        socket_connect_timeout: Optional[float],
        socket_keepalive: Optional[bool],
        socket_keepalive_options: Optional[Mapping[str, Union[int, str]]],
        connection_pool: Optional[ConnectionPool],
        unix_socket_path: Optional[Text],
        encoding: Text,
        encoding_errors: Text,
        charset: Optional[Text],
        decode_responses: Literal[True],
        errors: Optional[Text] = ...,
        retry_on_timeout: bool = ...,
        ssl: bool = ...,
        ssl_keyfile: Optional[Text] = ...,
        ssl_certfile: Optional[Text] = ...,
        ssl_cert_reqs: Optional[Union[str, int]] = ...,
        ssl_ca_certs: Optional[Text] = ...,
        ssl_check_hostname: bool = ...,
        max_connections: Optional[int] = ...,
        single_connection_client: bool = ...,
        health_check_interval: float = ...,
        client_name: Optional[Text] = ...,
        username: Optional[Text] = ...,
    ) -> Redis[str]: ...
    @overload
    def __new__(
        cls,
        host: Text = ...,
        port: int = ...,
        db: int = ...,
        password: Optional[Text] = ...,
        socket_timeout: Optional[float] = ...,
        socket_connect_timeout: Optional[float] = ...,
        socket_keepalive: Optional[bool] = ...,
        socket_keepalive_options: Optional[Mapping[str, Union[int, str]]] = ...,
        connection_pool: Optional[ConnectionPool] = ...,
        unix_socket_path: Optional[Text] = ...,
        encoding: Text = ...,
        encoding_errors: Text = ...,
        charset: Optional[Text] = ...,
        *,
        decode_responses: Literal[True],
        errors: Optional[Text] = ...,
        retry_on_timeout: bool = ...,
        ssl: bool = ...,
        ssl_keyfile: Optional[Text] = ...,
        ssl_certfile: Optional[Text] = ...,
        ssl_cert_reqs: Optional[Union[str, int]] = ...,
        ssl_ca_certs: Optional[Text] = ...,
        ssl_check_hostname: bool = ...,
        max_connections: Optional[int] = ...,
        single_connection_client: bool = ...,
        health_check_interval: float = ...,
        client_name: Optional[Text] = ...,
        username: Optional[Text] = ...,
    ) -> Redis[str]: ...
    @overload
    def __new__(
        cls,
        host: Text = ...,
        port: int = ...,
        db: int = ...,
        password: Optional[Text] = ...,
        socket_timeout: Optional[float] = ...,
        socket_connect_timeout: Optional[float] = ...,
        socket_keepalive: Optional[bool] = ...,
        socket_keepalive_options: Optional[Mapping[str, Union[int, str]]] = ...,
        connection_pool: Optional[ConnectionPool] = ...,
        unix_socket_path: Optional[Text] = ...,
        encoding: Text = ...,
        encoding_errors: Text = ...,
        charset: Optional[Text] = ...,
        errors: Optional[Text] = ...,
        decode_responses: Literal[False] = ...,
        retry_on_timeout: bool = ...,
        ssl: bool = ...,
        ssl_keyfile: Optional[Text] = ...,
        ssl_certfile: Optional[Text] = ...,
        ssl_cert_reqs: Optional[Union[str, int]] = ...,
        ssl_ca_certs: Optional[Text] = ...,
        ssl_check_hostname: bool = ...,
        max_connections: Optional[int] = ...,
        single_connection_client: bool = ...,
        health_check_interval: float = ...,
        client_name: Optional[Text] = ...,
        username: Optional[Text] = ...,
    ) -> Redis[bytes]: ...
    @overload
    def __init__(
        self: Redis[str],
        host: Text,
        port: int,
        db: int,
        password: Optional[Text],
        socket_timeout: Optional[float],
        socket_connect_timeout: Optional[float],
        socket_keepalive: Optional[bool],
        socket_keepalive_options: Optional[Mapping[str, Union[int, str]]],
        connection_pool: Optional[ConnectionPool],
        unix_socket_path: Optional[Text],
        encoding: Text,
        encoding_errors: Text,
        charset: Optional[Text],
        errors: Optional[Text],
        decode_responses: Literal[True],
        retry_on_timeout: bool = ...,
        ssl: bool = ...,
        ssl_keyfile: Optional[Text] = ...,
        ssl_certfile: Optional[Text] = ...,
        ssl_cert_reqs: Optional[Union[str, int]] = ...,
        ssl_ca_certs: Optional[Text] = ...,
        ssl_check_hostname: bool = ...,
        max_connections: Optional[int] = ...,
        single_connection_client: bool = ...,
        health_check_interval: float = ...,
        client_name: Optional[Text] = ...,
        username: Optional[Text] = ...,
    ) -> None: ...
    @overload
    def __init__(
        self: Redis[str],
        host: Text = ...,
        port: int = ...,
        db: int = ...,
        password: Optional[Text] = ...,
        socket_timeout: Optional[float] = ...,
        socket_connect_timeout: Optional[float] = ...,
        socket_keepalive: Optional[bool] = ...,
        socket_keepalive_options: Optional[Mapping[str, Union[int, str]]] = ...,
        connection_pool: Optional[ConnectionPool] = ...,
        unix_socket_path: Optional[Text] = ...,
        encoding: Text = ...,
        encoding_errors: Text = ...,
        charset: Optional[Text] = ...,
        errors: Optional[Text] = ...,
        *,
        decode_responses: Literal[True],
        retry_on_timeout: bool = ...,
        ssl: bool = ...,
        ssl_keyfile: Optional[Text] = ...,
        ssl_certfile: Optional[Text] = ...,
        ssl_cert_reqs: Optional[Union[str, int]] = ...,
        ssl_ca_certs: Optional[Text] = ...,
        ssl_check_hostname: bool = ...,
        max_connections: Optional[int] = ...,
        single_connection_client: bool = ...,
        health_check_interval: float = ...,
        client_name: Optional[Text] = ...,
        username: Optional[Text] = ...,
    ) -> None: ...
    @overload
    def __init__(
        self: Redis[bytes],
        host: Text = ...,
        port: int = ...,
        db: int = ...,
        password: Optional[Text] = ...,
        socket_timeout: Optional[float] = ...,
        socket_connect_timeout: Optional[float] = ...,
        socket_keepalive: Optional[bool] = ...,
        socket_keepalive_options: Optional[Mapping[str, Union[int, str]]] = ...,
        connection_pool: Optional[ConnectionPool] = ...,
        unix_socket_path: Optional[Text] = ...,
        encoding: Text = ...,
        encoding_errors: Text = ...,
        charset: Optional[Text] = ...,
        errors: Optional[Text] = ...,
        decode_responses: Literal[False] = ...,
        retry_on_timeout: bool = ...,
        ssl: bool = ...,
        ssl_keyfile: Optional[Text] = ...,
        ssl_certfile: Optional[Text] = ...,
        ssl_cert_reqs: Optional[Union[str, int]] = ...,
        ssl_ca_certs: Optional[Text] = ...,
        ssl_check_hostname: bool = ...,
        max_connections: Optional[int] = ...,
        single_connection_client: bool = ...,
        health_check_interval: float = ...,
        client_name: Optional[Text] = ...,
        username: Optional[Text] = ...,
    ) -> None: ...
    def set_response_callback(self, command, callback): ...
    def pipeline(self, transaction: bool = ..., shard_hint: Any = ...) -> Pipeline[_StrType]: ...
    def transaction(self, func, *watches, **kwargs): ...
    @overload
    def lock(
        self,
        name: _Key,
        timeout: Optional[float] = ...,
        sleep: float = ...,
        blocking_timeout: Optional[float] = ...,
        lock_class: None = ...,
        thread_local: bool = ...,
    ) -> Lock: ...
    @overload
    def lock(
        self,
        name: _Key,
        timeout: Optional[float],
        sleep: float,
        blocking_timeout: Optional[float],
        lock_class: Type[_LockType],
        thread_local: bool = ...,
    ) -> _LockType: ...
    @overload
    def lock(
        self,
        name: _Key,
        timeout: Optional[float] = ...,
        sleep: float = ...,
        blocking_timeout: Optional[float] = ...,
        *,
        lock_class: Type[_LockType],
        thread_local: bool = ...,
    ) -> _LockType: ...
    def pubsub(self, shard_hint: Any = ..., ignore_subscribe_messages: bool = ...) -> PubSub: ...
    def execute_command(self, *args, **options): ...
    def parse_response(self, connection, command_name, **options): ...
    def acl_cat(self, category: Optional[Text] = ...) -> List[str]: ...
    def acl_deluser(self, username: Text) -> int: ...
    def acl_genpass(self) -> Text: ...
    def acl_getuser(self, username: Text) -> Optional[Any]: ...
    def acl_list(self) -> List[Text]: ...
    def acl_load(self) -> bool: ...
    def acl_setuser(
        self,
        username: Text = ...,
        enabled: bool = ...,
        nopass: bool = ...,
        passwords: Optional[Sequence[Text]] = ...,
        hashed_passwords: Optional[Sequence[Text]] = ...,
        categories: Optional[Sequence[Text]] = ...,
        commands: Optional[Sequence[Text]] = ...,
        keys: Optional[Sequence[Text]] = ...,
        reset: bool = ...,
        reset_keys: bool = ...,
        reset_passwords: bool = ...,
    ) -> bool: ...
    def acl_users(self) -> List[Text]: ...
    def acl_whoami(self) -> Text: ...
    def bgrewriteaof(self): ...
    def bgsave(self): ...
    def client_id(self) -> int: ...
    def client_kill(self, address: Text) -> bool: ...
    def client_list(self) -> List[Dict[str, str]]: ...
    def client_getname(self) -> Optional[str]: ...
    def client_setname(self, name: Text) -> bool: ...
    def readwrite(self) -> bool: ...
    def readonly(self) -> bool: ...
    def config_get(self, pattern=...): ...
    def config_set(self, name, value): ...
    def config_resetstat(self): ...
    def config_rewrite(self): ...
    def dbsize(self) -> int: ...
    def debug_object(self, key): ...
    def echo(self, value: _Value) -> bytes: ...
    def flushall(self) -> bool: ...
    def flushdb(self) -> bool: ...
    def info(self, section: Optional[_Key] = ...)
-> Mapping[str, Any]: ...427 def lastsave(self): ...428 def object(self, infotype, key): ...429 def ping(self) -> bool: ...430 def save(self) -> bool: ...431 def sentinel(self, *args): ...432 def sentinel_get_master_addr_by_name(self, service_name): ...433 def sentinel_master(self, service_name): ...434 def sentinel_masters(self): ...435 def sentinel_monitor(self, name, ip, port, quorum): ...436 def sentinel_remove(self, name): ...437 def sentinel_sentinels(self, service_name): ...438 def sentinel_set(self, name, option, value): ...439 def sentinel_slaves(self, service_name): ...440 def shutdown(self): ...441 def slaveof(self, host=..., port=...): ...442 def slowlog_get(self, num=...): ...443 def slowlog_len(self): ...444 def slowlog_reset(self): ...445 def time(self): ...446 def append(self, key, value): ...447 def bitcount(self, key: _Key, start: Optional[int] = ..., end: Optional[int] = ...) -> int: ...448 def bitop(self, operation, dest, *keys): ...449 def bitpos(self, key, bit, start=..., end=...): ...450 def decr(self, name, amount=...): ...451 def delete(self, *names: _Key) -> int: ...452 def __delitem__(self, _Key): ...453 def dump(self, name): ...454 def exists(self, *names: _Key) -> int: ...455 __contains__: Any456 def expire(self, name: _Key, time: Union[int, timedelta]) -> bool: ...457 def expireat(self, name, when): ...458 def get(self, name: _Key) -> Optional[_StrType]: ...459 def __getitem__(self, name): ...460 def getbit(self, name: _Key, offset: int) -> int: ...461 def getrange(self, key, start, end): ...462 def getset(self, name, value) -> Optional[_StrType]: ...463 def incr(self, name: _Key, amount: int = ...) -> int: ...464 def incrby(self, name: _Key, amount: int = ...) -> int: ...465 def incrbyfloat(self, name: _Key, amount: float = ...) -> float: ...466 def keys(self, pattern: _Key = ...) -> List[_StrType]: ...467 def mget(self, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> List[Optional[_StrType]]: ...468 def mset(self, mapping: Mapping[_Key, _Value]) -> Literal[True]: ...469 def msetnx(self, mapping: Mapping[_Key, _Value]) -> bool: ...470 def move(self, name: _Key, db: int) -> bool: ...471 def persist(self, name: _Key) -> bool: ...472 def pexpire(self, name: _Key, time: Union[int, timedelta]) -> Literal[1, 0]: ...473 def pexpireat(self, name: _Key, when: Union[int, datetime]) -> Literal[1, 0]: ...474 def psetex(self, name, time_ms, value): ...475 def pttl(self, name): ...476 def randomkey(self): ...477 def rename(self, src, dst): ...478 def renamenx(self, src, dst): ...479 def restore(self, name, ttl, value): ...480 def set(481 self,482 name: _Key,483 value: _Value,484 ex: Union[None, int, timedelta] = ...,485 px: Union[None, int, timedelta] = ...,486 nx: bool = ...,487 xx: bool = ...,488 keepttl: bool = ...,489 ) -> Optional[bool]: ...490 def __setitem__(self, name, value): ...491 def setbit(self, name: _Key, offset: int, value: int) -> int: ...492 def setex(self, name: _Key, time: Union[int, timedelta], value: _Value) -> bool: ...493 def setnx(self, name: _Key, value: _Value) -> bool: ...494 def setrange(self, name, offset, value): ...495 def strlen(self, name): ...496 def substr(self, name, start, end=...): ...497 def ttl(self, name: _Key) -> int: ...498 def type(self, name): ...499 def watch(self, *names): ...500 def unlink(self, *names: _Key) -> int: ...501 def unwatch(self): ...502 @overload503 def blpop(self, keys: Union[_Value, Iterable[_Value]], timeout: Literal[0] = ...) 
-> Tuple[_StrType, _StrType]: ...504 @overload505 def blpop(self, keys: Union[_Value, Iterable[_Value]], timeout: float) -> Optional[Tuple[_StrType, _StrType]]: ...506 @overload507 def brpop(self, keys: Union[_Value, Iterable[_Value]], timeout: Literal[0] = ...) -> Tuple[_StrType, _StrType]: ...508 @overload509 def brpop(self, keys: Union[_Value, Iterable[_Value]], timeout: float) -> Optional[Tuple[_StrType, _StrType]]: ...510 def brpoplpush(self, src, dst, timeout=...): ...511 def lindex(self, name: _Key, index: int) -> Optional[_StrType]: ...512 def linsert(513 self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value514 ) -> int: ...515 def llen(self, name: _Key) -> int: ...516 def lpop(self, name): ...517 def lpush(self, name: _Value, *values: _Value) -> int: ...518 def lpushx(self, name, value): ...519 def lrange(self, name: _Key, start: int, end: int) -> List[_StrType]: ...520 def lrem(self, name: _Key, count: int, value: _Value) -> int: ...521 def lset(self, name: _Key, index: int, value: _Value) -> bool: ...522 def ltrim(self, name: _Key, start: int, end: int) -> bool: ...523 def rpop(self, name): ...524 def rpoplpush(self, src, dst): ...525 def rpush(self, name: _Value, *values: _Value) -> int: ...526 def rpushx(self, name, value): ...527 @overload528 def sort(529 self,530 name: _Key,531 start: Optional[int] = ...,532 num: Optional[int] = ...,533 by: Optional[_Key] = ...,534 get: Optional[Union[_Key, Sequence[_Key]]] = ...,535 desc: bool = ...,536 alpha: bool = ...,537 store: None = ...,538 groups: bool = ...,539 ) -> List[_StrType]: ...540 @overload541 def sort(542 self,543 name: _Key,544 start: Optional[int] = ...,545 num: Optional[int] = ...,546 by: Optional[_Key] = ...,547 get: Optional[Union[_Key, Sequence[_Key]]] = ...,548 desc: bool = ...,549 alpha: bool = ...,550 *,551 store: _Key,552 groups: bool = ...,553 ) -> int: ...554 @overload555 def sort(556 self,557 name: _Key,558 start: Optional[int],559 num: Optional[int],560 by: Optional[_Key],561 get: Optional[Union[_Key, Sequence[_Key]]],562 desc: bool,563 alpha: bool,564 store: _Key,565 groups: bool = ...,566 ) -> int: ...567 def scan(self, cursor: int = ..., match: Optional[_Key] = ..., count: Optional[int] = ...) -> Tuple[int, List[_StrType]]: ...568 def scan_iter(self, match: Optional[Text] = ..., count: Optional[int] = ...) -> Iterator[_StrType]: ...569 def sscan(self, name: _Key, cursor: int = ..., match: Text = ..., count: int = ...) 
-> Tuple[int, List[_StrType]]: ...570 def sscan_iter(self, name, match=..., count=...): ...571 def hscan(572 self, name: _Key, cursor: int = ..., match: Text = ..., count: int = ...573 ) -> Tuple[int, Dict[_StrType, _StrType]]: ...574 def hscan_iter(self, name, match=..., count=...): ...575 def zscan(self, name, cursor=..., match=..., count=..., score_cast_func=...): ...576 def zscan_iter(self, name, match=..., count=..., score_cast_func=...): ...577 def sadd(self, name: _Key, *values: _Value) -> int: ...578 def scard(self, name: _Key) -> int: ...579 def sdiff(self, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> Set[_Value]: ...580 def sdiffstore(self, dest: _Key, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> int: ...581 def sinter(self, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> Set[_Value]: ...582 def sinterstore(self, dest: _Key, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> int: ...583 def sismember(self, name: _Key, value: _Value) -> bool: ...584 def smembers(self, name: _Key) -> Set[_StrType]: ...585 def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ...586 @overload587 def spop(self, name: _Key, count: None = ...) -> Optional[_Value]: ...588 @overload589 def spop(self, name: _Key, count: int) -> List[_Value]: ...590 @overload591 def srandmember(self, name: _Key, number: None = ...) -> Optional[_Value]: ...592 @overload593 def srandmember(self, name: _Key, number: int) -> List[_Value]: ...594 def srem(self, name: _Key, *values: _Value) -> int: ...595 def sunion(self, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> Set[_Value]: ...596 def sunionstore(self, dest: _Key, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> int: ...597 def xack(self, name, groupname, *ids): ...598 def xadd(self, name, fields, id=..., maxlen=..., approximate=...): ...599 def xclaim(600 self, name, groupname, consumername, min_idle_time, message_ids, idle=..., time=..., retrycount=..., force=..., justid=...601 ): ...602 def xdel(self, name, *ids): ...603 def xgroup_create(self, name, groupname, id=..., mkstream=...): ...604 def xgroup_delconsumer(self, name, groupname, consumername): ...605 def xgroup_destroy(self, name, groupname): ...606 def xgroup_setid(self, name, groupname, id): ...607 def xinfo_consumers(self, name, groupname): ...608 def xinfo_groups(self, name): ...609 def xinfo_stream(self, name): ...610 def xlen(self, name: _Key) -> int: ...611 def xpending(self, name, groupname): ...612 def xpending_range(self, name, groupname, min, max, count, consumername=...): ...613 def xrange(self, name, min=..., max=..., count=...): ...614 def xread(self, streams, count=..., block=...): ...615 def xreadgroup(self, groupname, consumername, streams, count=..., block=..., noack=...): ...616 def xrevrange(self, name, max=..., min=..., count=...): ...617 def xtrim(self, name, maxlen, approximate=...): ...618 def zadd(619 self, name: _Key, mapping: Mapping[_Key, _Value], nx: bool = ..., xx: bool = ..., ch: bool = ..., incr: bool = ...620 ) -> int: ...621 def zcard(self, name: _Key) -> int: ...622 def zcount(self, name: _Key, min: _Value, max: _Value) -> int: ...623 def zincrby(self, name: _Key, amount: float, value: _Value) -> float: ...624 def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] = ...) -> int: ...625 def zlexcount(self, name: _Key, min: _Value, max: _Value) -> int: ...626 def zpopmax(self, name: _Key, count: Optional[int] = ...) -> List[_StrType]: ...627 def zpopmin(self, name: _Key, count: Optional[int] = ...) 
-> List[_StrType]: ...628 @overload629 def bzpopmax(self, keys: Union[_Key, Iterable[_Key]], timeout: Literal[0] = ...) -> Tuple[_StrType, _StrType, float]: ...630 @overload631 def bzpopmax(self, keys: Union[_Key, Iterable[_Key]], timeout: float) -> Optional[Tuple[_StrType, _StrType, float]]: ...632 @overload633 def bzpopmin(self, keys: Union[_Key, Iterable[_Key]], timeout: Literal[0] = ...) -> Tuple[_StrType, _StrType, float]: ...634 @overload635 def bzpopmin(self, keys: Union[_Key, Iterable[_Key]], timeout: float) -> Optional[Tuple[_StrType, _StrType, float]]: ...636 @overload637 def zrange(638 self,639 name: _Key,640 start: int,641 end: int,642 desc: bool = ...,643 *,644 withscores: Literal[True],645 score_cast_func: Callable[[float], _ScoreCastFuncReturn] = ...,646 ) -> List[Tuple[_StrType, _ScoreCastFuncReturn]]: ...647 @overload648 def zrange(649 self,650 name: _Key,651 start: int,652 end: int,653 desc: bool = ...,654 withscores: bool = ...,655 score_cast_func: Callable[[Any], Any] = ...,656 ) -> List[_StrType]: ...657 def zrangebylex(658 self, name: _Key, min: _Value, max: _Value, start: Optional[int] = ..., num: Optional[int] = ...659 ) -> List[_StrType]: ...660 @overload661 def zrangebyscore(662 self,663 name: _Key,664 min: _Value,665 max: _Value,666 start: Optional[int] = ...,667 num: Optional[int] = ...,668 *,669 withscores: Literal[True],670 score_cast_func: Callable[[float], _ScoreCastFuncReturn] = ...,671 ) -> List[Tuple[_StrType, _ScoreCastFuncReturn]]: ...672 @overload673 def zrangebyscore(674 self,675 name: _Key,676 min: _Value,677 max: _Value,678 start: Optional[int] = ...,679 num: Optional[int] = ...,680 withscores: bool = ...,681 score_cast_func: Callable[[Any], Any] = ...,682 ) -> List[_StrType]: ...683 def zrank(self, name: _Key, value: _Value) -> Optional[int]: ...684 def zrem(self, name: _Key, *values: _Value) -> int: ...685 def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> int: ...686 def zremrangebyrank(self, name: _Key, min: int, max: int) -> int: ...687 def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> int: ...688 @overload689 def zrevrange(690 self,691 name: _Key,692 start: int,693 end: int,694 desc: bool = ...,695 *,696 withscores: Literal[True],697 score_cast_func: Callable[[float], _ScoreCastFuncReturn] = ...,698 ) -> List[Tuple[_StrType, _ScoreCastFuncReturn]]: ...699 @overload700 def zrevrange(701 self,702 name: _Key,703 start: int,704 end: int,705 desc: bool = ...,706 withscores: bool = ...,707 score_cast_func: Callable[[Any], Any] = ...,708 ) -> List[_StrType]: ...709 @overload710 def zrevrangebyscore(711 self,712 name: _Key,713 min: _Value,714 max: _Value,715 start: Optional[int] = ...,716 num: Optional[int] = ...,717 *,718 withscores: Literal[True],719 score_cast_func: Callable[[float], _ScoreCastFuncReturn] = ...,720 ) -> List[Tuple[_StrType, _ScoreCastFuncReturn]]: ...721 @overload722 def zrevrangebyscore(723 self,724 name: _Key,725 min: _Value,726 max: _Value,727 start: Optional[int] = ...,728 num: Optional[int] = ...,729 withscores: bool = ...,730 score_cast_func: Callable[[Any], Any] = ...,731 ) -> List[_StrType]: ...732 def zrevrangebylex(733 self, name: _Key, min: _Value, max: _Value, start: Optional[int] = ..., num: Optional[int] = ...734 ) -> List[_StrType]: ...735 def zrevrank(self, name: _Key, value: _Value) -> Optional[int]: ...736 def zscore(self, name: _Key, value: _Value) -> Optional[float]: ...737 def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] = ...) 
-> int: ...738 def pfadd(self, name: _Key, *values: _Value) -> int: ...739 def pfcount(self, name: _Key) -> int: ...740 def pfmerge(self, dest: _Key, *sources: _Key) -> bool: ...741 def hdel(self, name: _Key, *keys: _Key) -> int: ...742 def hexists(self, name: _Key, key: _Key) -> bool: ...743 def hget(self, name: _Key, key: _Key) -> Optional[_StrType]: ...744 def hgetall(self, name: _Key) -> Dict[_StrType, _StrType]: ...745 def hincrby(self, name: _Key, key: _Key, amount: int = ...) -> int: ...746 def hincrbyfloat(self, name: _Key, key: _Key, amount: float = ...) -> float: ...747 def hkeys(self, name: _Key) -> List[_StrType]: ...748 def hlen(self, name: _Key) -> int: ...749 @overload750 def hset(self, name: _Key, key: _Key, value: _Value, mapping: Optional[Mapping[_Key, _Value]] = ...) -> int: ...751 @overload752 def hset(self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value]) -> int: ...753 @overload754 def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value]) -> int: ...755 def hsetnx(self, name: _Key, key: _Key, value: _Value) -> int: ...756 def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> bool: ...757 def hmget(self, name: _Key, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> List[Optional[_StrType]]: ...758 def hvals(self, name: _Key) -> List[_StrType]: ...759 def publish(self, channel: _Key, message: _Key) -> int: ...760 def eval(self, script, numkeys, *keys_and_args): ...761 def evalsha(self, sha, numkeys, *keys_and_args): ...762 def script_exists(self, *args): ...763 def script_flush(self): ...764 def script_kill(self): ...765 def script_load(self, script): ...766 def register_script(self, script: Union[Text, _StrType]) -> Script: ...767 def pubsub_channels(self, pattern: _Key = ...) -> List[Text]: ...768 def pubsub_numsub(self, *args: _Key) -> List[Tuple[Text, int]]: ...769 def pubsub_numpat(self) -> int: ...770 def monitor(self) -> Monitor: ...771 def cluster(self, cluster_arg: str, *args: Any) -> Any: ...772 def __enter__(self) -> Redis[_StrType]: ...773 def __exit__(self, exc_type, exc_value, traceback): ...774 def __del__(self) -> None: ...775 def close(self) -> None: ...776 def client(self) -> Redis[_StrType]: ...777StrictRedis = Redis778class PubSub:779 PUBLISH_MESSAGE_TYPES: Any780 UNSUBSCRIBE_MESSAGE_TYPES: Any781 connection_pool: Any782 shard_hint: Any783 ignore_subscribe_messages: Any784 connection: Any785 encoding: Any786 encoding_errors: Any787 decode_responses: Any788 def __init__(self, connection_pool, shard_hint=..., ignore_subscribe_messages=...) -> None: ...789 def __del__(self): ...790 channels: Any791 patterns: Any792 def reset(self): ...793 def close(self) -> None: ...794 def on_connect(self, connection): ...795 def encode(self, value): ...796 @property797 def subscribed(self): ...798 def execute_command(self, *args, **kwargs): ...799 def parse_response(self, block=...): ...800 def psubscribe(self, *args: _Key, **kwargs: Callable[[Any], None]): ...801 def punsubscribe(self, *args: _Key) -> None: ...802 def subscribe(self, *args: _Key, **kwargs: Callable[[Any], None]) -> None: ...803 def unsubscribe(self, *args: _Key) -> None: ...804 def listen(self): ...805 def get_message(self, ignore_subscribe_messages: bool = ..., timeout: float = ...) -> Optional[Dict[str, Any]]: ...806 def handle_message(self, response, ignore_subscribe_messages: bool = ...) -> Optional[Dict[str, Any]]: ...807 def run_in_thread(self, sleep_time=...): ...808 def ping(self, message: Optional[_Value] = ...) 
-> None: ...809class Pipeline(Redis[_StrType], Generic[_StrType]):810 UNWATCH_COMMANDS: Any811 connection_pool: Any812 connection: Any813 response_callbacks: Any814 transaction: bool815 shard_hint: Any816 watching: bool817 command_stack: Any818 scripts: Any819 explicit_transaction: Any820 def __init__(self, connection_pool, response_callbacks, transaction, shard_hint) -> None: ...821 def __enter__(self) -> Pipeline[_StrType]: ... # type: ignore [override]822 def __exit__(self, exc_type, exc_value, traceback) -> None: ...823 def __del__(self) -> None: ...824 def __len__(self) -> int: ...825 def __bool__(self) -> bool: ...826 def reset(self) -> None: ...827 def multi(self) -> None: ...828 def execute_command(self, *args, **options): ...829 def immediate_execute_command(self, *args, **options): ...830 def pipeline_execute_command(self, *args, **options): ...831 def raise_first_error(self, commands, response): ...832 def annotate_exception(self, exception, number, command): ...833 def parse_response(self, connection, command_name, **options): ...834 def load_scripts(self): ...835 def execute(self, raise_on_error: bool = ...) -> List[Any]: ...836 def watch(self, *names: _Key) -> bool: ...837 def unwatch(self) -> bool: ...838 # in the Redis implementation, the following methods are inherited from client.839 def set_response_callback(self, command, callback): ...840 def pipeline(self, transaction: bool = ..., shard_hint: Any = ...) -> Pipeline[_StrType]: ... # type: ignore [override]841 def lock(self, name, timeout=..., sleep=..., blocking_timeout=..., lock_class=..., thread_local=...): ...842 def pubsub(self, shard_hint: Any = ..., ignore_subscribe_messages: bool = ...) -> PubSub: ...843 def acl_cat(self, category: Optional[Text] = ...) -> Pipeline[_StrType]: ... # type: ignore [override]844 def acl_deluser(self, username: Text) -> Pipeline[_StrType]: ... # type: ignore [override]845 def acl_genpass(self) -> Pipeline[_StrType]: ... # type: ignore [override]846 def acl_getuser(self, username: Text) -> Pipeline[_StrType]: ... # type: ignore [override]847 def acl_list(self) -> Pipeline[_StrType]: ... # type: ignore [override]848 def acl_load(self) -> Pipeline[_StrType]: ... # type: ignore [override]849 def acl_setuser( # type: ignore [override]850 self,851 username: Text = ...,852 enabled: bool = ...,853 nopass: bool = ...,854 passwords: Optional[Sequence[Text]] = ...,855 hashed_passwords: Optional[Sequence[Text]] = ...,856 categories: Optional[Sequence[Text]] = ...,857 commands: Optional[Sequence[Text]] = ...,858 keys: Optional[Sequence[Text]] = ...,859 reset: bool = ...,860 reset_keys: bool = ...,861 reset_passwords: bool = ...,862 ) -> Pipeline[_StrType]: ...863 def acl_users(self) -> Pipeline[_StrType]: ... # type: ignore [override]864 def acl_whoami(self) -> Pipeline[_StrType]: ... # type: ignore [override]865 def bgrewriteaof(self) -> Pipeline[_StrType]: ... # type: ignore [override]866 def bgsave(self) -> Pipeline[_StrType]: ... # type: ignore [override]867 def client_id(self) -> Pipeline[_StrType]: ... # type: ignore [override]868 def client_kill(self, address: Text) -> Pipeline[_StrType]: ... # type: ignore [override]869 def client_list(self) -> Pipeline[_StrType]: ... # type: ignore [override]870 def client_getname(self) -> Pipeline[_StrType]: ... # type: ignore [override]871 def client_setname(self, name: Text) -> Pipeline[_StrType]: ... # type: ignore [override]872 def readwrite(self) -> Pipeline[_StrType]: ... # type: ignore [override]873 def readonly(self) -> Pipeline[_StrType]: ... 
# type: ignore [override]874 def config_get(self, pattern=...) -> Pipeline[_StrType]: ... # type: ignore [override]875 def config_set(self, name, value) -> Pipeline[_StrType]: ... # type: ignore [override]876 def config_resetstat(self) -> Pipeline[_StrType]: ... # type: ignore [override]877 def config_rewrite(self) -> Pipeline[_StrType]: ... # type: ignore [override]878 def dbsize(self) -> Pipeline[_StrType]: ... # type: ignore [override]879 def debug_object(self, key) -> Pipeline[_StrType]: ... # type: ignore [override]880 def echo(self, value) -> Pipeline[_StrType]: ... # type: ignore [override]881 def flushall(self) -> Pipeline[_StrType]: ... # type: ignore [override]882 def flushdb(self) -> Pipeline[_StrType]: ... # type: ignore [override]883 def info(self, section: Optional[_Key] = ...) -> Pipeline[_StrType]: ... # type: ignore [override]884 def lastsave(self) -> Pipeline[_StrType]: ... # type: ignore [override]885 def object(self, infotype, key) -> Pipeline[_StrType]: ... # type: ignore [override]886 def ping(self) -> Pipeline[_StrType]: ... # type: ignore [override]887 def save(self) -> Pipeline[_StrType]: ... # type: ignore [override]888 def sentinel(self, *args) -> None: ...889 def sentinel_get_master_addr_by_name(self, service_name) -> Pipeline[_StrType]: ... # type: ignore [override]890 def sentinel_master(self, service_name) -> Pipeline[_StrType]: ... # type: ignore [override]891 def sentinel_masters(self) -> Pipeline[_StrType]: ... # type: ignore [override]892 def sentinel_monitor(self, name, ip, port, quorum) -> Pipeline[_StrType]: ... # type: ignore [override]893 def sentinel_remove(self, name) -> Pipeline[_StrType]: ... # type: ignore [override]894 def sentinel_sentinels(self, service_name) -> Pipeline[_StrType]: ... # type: ignore [override]895 def sentinel_set(self, name, option, value) -> Pipeline[_StrType]: ... # type: ignore [override]896 def sentinel_slaves(self, service_name) -> Pipeline[_StrType]: ... # type: ignore [override]897 def shutdown(self) -> None: ...898 def slaveof(self, host=..., port=...) -> Pipeline[_StrType]: ... # type: ignore [override]899 def slowlog_get(self, num=...) -> Pipeline[_StrType]: ... # type: ignore [override]900 def slowlog_len(self) -> Pipeline[_StrType]: ... # type: ignore [override]901 def slowlog_reset(self) -> Pipeline[_StrType]: ... # type: ignore [override]902 def time(self) -> Pipeline[_StrType]: ... # type: ignore [override]903 def append(self, key, value) -> Pipeline[_StrType]: ... # type: ignore [override]904 def bitcount(self, key: _Key, start: Optional[int] = ..., end: Optional[int] = ...) -> Pipeline[_StrType]: ... # type: ignore [override]905 def bitop(self, operation, dest, *keys) -> Pipeline[_StrType]: ... # type: ignore [override]906 def bitpos(self, key, bit, start=..., end=...) -> Pipeline[_StrType]: ... # type: ignore [override]907 def decr(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore [override]908 def delete(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]909 def __delitem__(self, _Key) -> None: ...910 def dump(self, name) -> Pipeline[_StrType]: ... # type: ignore [override]911 def exists(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]912 def __contains__(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]913 def expire(self, name: _Key, time: Union[int, timedelta]) -> Pipeline[_StrType]: ... # type: ignore [override]914 def expireat(self, name, when) -> Pipeline[_StrType]: ... 
# type: ignore [override]915 def get(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]916 def __getitem__(self, name) -> Pipeline[_StrType]: ... # type: ignore [override]917 def getbit(self, name: _Key, offset: int) -> Pipeline[_StrType]: ... # type: ignore [override]918 def getrange(self, key, start, end) -> Pipeline[_StrType]: ... # type: ignore [override]919 def getset(self, name, value) -> Pipeline[_StrType]: ... # type: ignore [override]920 def incr(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore [override]921 def incrby(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore [override]922 def incrbyfloat(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore [override]923 def keys(self, pattern: _Key = ...) -> Pipeline[_StrType]: ... # type: ignore [override]924 def mget(self, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]925 def mset(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore [override]926 def msetnx(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore [override]927 def move(self, name: _Key, db: int) -> Pipeline[_StrType]: ... # type: ignore [override]928 def persist(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]929 def pexpire(self, name: _Key, time: Union[int, timedelta]) -> Pipeline[_StrType]: ... # type: ignore [override]930 def pexpireat(self, name: _Key, when: Union[int, datetime]) -> Pipeline[_StrType]: ... # type: ignore [override]931 def psetex(self, name, time_ms, value) -> Pipeline[_StrType]: ... # type: ignore [override]932 def pttl(self, name) -> Pipeline[_StrType]: ... # type: ignore [override]933 def randomkey(self) -> Pipeline[_StrType]: ... # type: ignore [override]934 def rename(self, src, dst) -> Pipeline[_StrType]: ... # type: ignore [override]935 def renamenx(self, src, dst) -> Pipeline[_StrType]: ... # type: ignore [override]936 def restore(self, name, ttl, value) -> Pipeline[_StrType]: ... # type: ignore [override]937 def set( # type: ignore [override]938 self,939 name: _Key,940 value: _Value,941 ex: Union[None, int, timedelta] = ...,942 px: Union[None, int, timedelta] = ...,943 nx: bool = ...,944 xx: bool = ...,945 keepttl: bool = ...,946 ) -> Pipeline[_StrType]: ...947 def __setitem__(self, name, value) -> None: ...948 def setbit(self, name: _Key, offset: int, value: int) -> Pipeline[_StrType]: ... # type: ignore [override]949 def setex(self, name: _Key, time: Union[int, timedelta], value: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]950 def setnx(self, name, value) -> Pipeline[_StrType]: ... # type: ignore [override]951 def setrange(self, name, offset, value) -> Pipeline[_StrType]: ... # type: ignore [override]952 def strlen(self, name) -> Pipeline[_StrType]: ... # type: ignore [override]953 def substr(self, name, start, end=...) -> Pipeline[_StrType]: ... # type: ignore [override]954 def ttl(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]955 def type(self, name) -> Pipeline[_StrType]: ... # type: ignore [override]956 def unlink(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]957 def blpop(self, keys: Union[_Value, Iterable[_Value]], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore [override]958 def brpop(self, keys: Union[_Value, Iterable[_Value]], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore [override]959 def brpoplpush(self, src, dst, timeout=...) 
-> Pipeline[_StrType]: ... # type: ignore [override]960 def lindex(self, name: _Key, index: int) -> Pipeline[_StrType]: ... # type: ignore [override]961 def linsert( # type: ignore [override]962 self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value963 ) -> Pipeline[_StrType]: ...964 def llen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]965 def lpop(self, name) -> Pipeline[_StrType]: ... # type: ignore [override]966 def lpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]967 def lpushx(self, name, value) -> Pipeline[_StrType]: ... # type: ignore [override]968 def lrange(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ... # type: ignore [override]969 def lrem(self, name: _Key, count: int, value: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]970 def lset(self, name: _Key, index: int, value: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]971 def ltrim(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ... # type: ignore [override]972 def rpop(self, name) -> Pipeline[_StrType]: ... # type: ignore [override]973 def rpoplpush(self, src, dst) -> Pipeline[_StrType]: ... # type: ignore [override]974 def rpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]975 def rpushx(self, name, value) -> Pipeline[_StrType]: ... # type: ignore [override]976 def sort( # type: ignore [override]977 self,978 name: _Key,979 start: Optional[int] = ...,980 num: Optional[int] = ...,981 by: Optional[_Key] = ...,982 get: Optional[Union[_Key, Sequence[_Key]]] = ...,983 desc: bool = ...,984 alpha: bool = ...,985 store: Optional[_Key] = ...,986 groups: bool = ...,987 ) -> Pipeline[_StrType]: ...988 def scan(self, cursor: int = ..., match: Optional[_Key] = ..., count: Optional[int] = ...) -> Pipeline[_StrType]: ... # type: ignore [override]989 def scan_iter(self, match: Optional[Text] = ..., count: Optional[int] = ...) -> Iterator[Any]: ...990 def sscan(self, name: _Key, cursor: int = ..., match: Text = ..., count: int = ...) -> Pipeline[_StrType]: ... # type: ignore [override]991 def sscan_iter(self, name, match=..., count=...) -> Iterator[Any]: ...992 def hscan(self, name: _Key, cursor: int = ..., match: Text = ..., count: int = ...) -> Pipeline[_StrType]: ... # type: ignore [override]993 def hscan_iter(self, name, match=..., count=...) -> Iterator[Any]: ...994 def zscan(self, name, cursor=..., match=..., count=..., score_cast_func=...) -> Pipeline[_StrType]: ... # type: ignore [override]995 def zscan_iter(self, name, match=..., count=..., score_cast_func=...) -> Iterator[Any]: ...996 def sadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]997 def scard(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]998 def sdiff(self, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]999 def sdiffstore(self, dest: _Key, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1000 def sinter(self, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1001 def sinterstore(self, dest: _Key, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1002 def sismember(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... 
# type: ignore [override]1003 def smembers(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1004 def smove(self, src: _Key, dst: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1005 def spop(self, name: _Key, count: Optional[int] = ...) -> Pipeline[_StrType]: ... # type: ignore [override]1006 def srandmember(self, name: _Key, number: Optional[int] = ...) -> Pipeline[_StrType]: ... # type: ignore [override]1007 def srem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1008 def sunion(self, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1009 def sunionstore(self, dest: _Key, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1010 def xack(self, name, groupname, *ids) -> Pipeline[_StrType]: ... # type: ignore [override]1011 def xadd(self, name, fields, id=..., maxlen=..., approximate=...) -> Pipeline[_StrType]: ... # type: ignore [override]1012 def xclaim(1013 self, name, groupname, consumername, min_idle_time, message_ids, idle=..., time=..., retrycount=..., force=..., justid=...1014 ) -> Pipeline[_StrType]: ... # type: ignore [override]1015 def xdel(self, name, *ids) -> Pipeline[_StrType]: ... # type: ignore [override]1016 def xgroup_create(self, name, groupname, id=..., mkstream=...) -> Pipeline[_StrType]: ... # type: ignore [override]1017 def xgroup_delconsumer(self, name, groupname, consumername) -> Pipeline[_StrType]: ... # type: ignore [override]1018 def xgroup_destroy(self, name, groupname) -> Pipeline[_StrType]: ... # type: ignore [override]1019 def xgroup_setid(self, name, groupname, id) -> Pipeline[_StrType]: ... # type: ignore [override]1020 def xinfo_consumers(self, name, groupname) -> Pipeline[_StrType]: ... # type: ignore [override]1021 def xinfo_groups(self, name) -> Pipeline[_StrType]: ... # type: ignore [override]1022 def xinfo_stream(self, name) -> Pipeline[_StrType]: ... # type: ignore [override]1023 def xlen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1024 def xpending(self, name, groupname) -> Pipeline[_StrType]: ... # type: ignore [override]1025 def xpending_range(self, name, groupname, min, max, count, consumername=...) -> Pipeline[_StrType]: ... # type: ignore [override]1026 def xrange(self, name, min=..., max=..., count=...) -> Pipeline[_StrType]: ... # type: ignore [override]1027 def xread(self, streams, count=..., block=...) -> Pipeline[_StrType]: ... # type: ignore [override]1028 def xreadgroup(self, groupname, consumername, streams, count=..., block=..., noack=...) -> Pipeline[_StrType]: ... # type: ignore [override]1029 def xrevrange(self, name, max=..., min=..., count=...) -> Pipeline[_StrType]: ... # type: ignore [override]1030 def xtrim(self, name, maxlen, approximate=...) -> Pipeline[_StrType]: ... # type: ignore [override]1031 def zadd( # type: ignore [override]1032 self, name: _Key, mapping: Mapping[_Key, _Value], nx: bool = ..., xx: bool = ..., ch: bool = ..., incr: bool = ...1033 ) -> Pipeline[_StrType]: ...1034 def zcard(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1035 def zcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1036 def zincrby(self, name: _Key, amount: float, value: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1037 def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] = ...) 
-> Pipeline[_StrType]: ... # type: ignore [override]1038 def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1039 def zpopmax(self, name: _Key, count: Optional[int] = ...) -> Pipeline[_StrType]: ... # type: ignore [override]1040 def zpopmin(self, name: _Key, count: Optional[int] = ...) -> Pipeline[_StrType]: ... # type: ignore [override]1041 def bzpopmax(self, keys: Union[_Key, Iterable[_Key]], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore [override]1042 def bzpopmin(self, keys: Union[_Key, Iterable[_Key]], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore [override]1043 def zrange( # type: ignore [override]1044 self,1045 name: _Key,1046 start: int,1047 end: int,1048 desc: bool = ...,1049 withscores: bool = ...,1050 score_cast_func: Callable[[Any], Any] = ...,1051 ) -> Pipeline[_StrType]: ...1052 def zrangebylex(self, name: _Key, min: _Value, max: _Value, start: Optional[int] = ..., num: Optional[int] = ...) -> Pipeline[_StrType]: ... # type: ignore [override]1053 def zrangebyscore( # type: ignore [override]1054 self,1055 name: _Key,1056 min: _Value,1057 max: _Value,1058 start: Optional[int] = ...,1059 num: Optional[int] = ...,1060 withscores: bool = ...,1061 score_cast_func: Callable[[Any], Any] = ...,1062 ) -> Pipeline[_StrType]: ...1063 def zrank(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1064 def zrem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1065 def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1066 def zremrangebyrank(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1067 def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1068 def zrevrange( # type: ignore [override]1069 self,1070 name: _Key,1071 start: int,1072 end: int,1073 desc: bool = ...,1074 withscores: bool = ...,1075 score_cast_func: Callable[[Any], Any] = ...,1076 ) -> Pipeline[_StrType]: ...1077 def zrevrangebyscore( # type: ignore [override]1078 self,1079 name: _Key,1080 min: _Value,1081 max: _Value,1082 start: Optional[int] = ...,1083 num: Optional[int] = ...,1084 withscores: bool = ...,1085 score_cast_func: Callable[[Any], Any] = ...,1086 ) -> Pipeline[_StrType]: ...1087 def zrevrangebylex( # type: ignore [override]1088 self, name: _Key, min: _Value, max: _Value, start: Optional[int] = ..., num: Optional[int] = ...1089 ) -> Pipeline[_StrType]: ...1090 def zrevrank(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1091 def zscore(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1092 def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] = ...) -> Pipeline[_StrType]: ... # type: ignore [override]1093 def pfadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1094 def pfcount(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1095 def pfmerge(self, dest: _Key, *sources: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1096 def hdel(self, name: _Key, *keys: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1097 def hexists(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1098 def hget(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ... 
# type: ignore [override]1099 def hgetall(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1100 def hincrby(self, name: _Key, key: _Key, amount: int = ...) -> Pipeline[_StrType]: ... # type: ignore [override]1101 def hincrbyfloat(self, name: _Key, key: _Key, amount: float = ...) -> Pipeline[_StrType]: ... # type: ignore [override]1102 def hkeys(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1103 def hlen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1104 @overload # type: ignore [override]1105 def hset(1106 self, name: _Key, key: _Key, value: _Value, mapping: Optional[Mapping[_Key, _Value]] = ...1107 ) -> Pipeline[_StrType]: ...1108 @overload # type: ignore [override]1109 def hset(self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ...1110 @overload # type: ignore [override]1111 def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ...1112 def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore [override]1113 def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore [override]1114 def hmget(self, name: _Key, keys: Union[_Key, Iterable[_Key]], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1115 def hvals(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1116 def publish(self, channel: _Key, message: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1117 def eval(self, script, numkeys, *keys_and_args) -> Pipeline[_StrType]: ... # type: ignore [override]1118 def evalsha(self, sha, numkeys, *keys_and_args) -> Pipeline[_StrType]: ... # type: ignore [override]1119 def script_exists(self, *args) -> Pipeline[_StrType]: ... # type: ignore [override]1120 def script_flush(self) -> Pipeline[_StrType]: ... # type: ignore [override]1121 def script_kill(self) -> Pipeline[_StrType]: ... # type: ignore [override]1122 def script_load(self, script) -> Pipeline[_StrType]: ... # type: ignore [override]1123 def register_script(self, script: Union[Text, _StrType]) -> Script: ...1124 def pubsub_channels(self, pattern: _Key = ...) -> Pipeline[_StrType]: ... # type: ignore [override]1125 def pubsub_numsub(self, *args: _Key) -> Pipeline[_StrType]: ... # type: ignore [override]1126 def pubsub_numpat(self) -> Pipeline[_StrType]: ... # type: ignore [override]1127 def monitor(self) -> Monitor: ...1128 def cluster(self, cluster_arg: str, *args: Any) -> Pipeline[_StrType]: ... # type: ignore [override]1129 def client(self) -> Any: ...1130class Script:1131 registered_client: Any1132 script: Any1133 sha: Any1134 def __init__(self, registered_client, script) -> None: ...1135 def __call__(self, keys=..., args=..., client=...): ...1136class Monitor(object):1137 def __init__(self, connection_pool) -> None: ...1138 def __enter__(self) -> Monitor: ...1139 def __exit__(self, *args: Any) -> None: ...1140 def next_command(self) -> Dict[Text, Any]: ......
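The stub above relies on two idioms that are easy to miss in the raw listing: the @overload pairs on decode_responses (Literal[True] yields Redis[str], Literal[False] yields Redis[bytes]), and the Pipeline subclass re-declaring every command to return the pipeline itself so calls can be chained before execute(). A minimal sketch of what that buys you at type-checking time, assuming a reachable local Redis server and made-up key names:

import redis

r_bytes = redis.Redis()                      # decode_responses defaults to False -> Redis[bytes]
r_text = redis.Redis(decode_responses=True)  # Literal[True] overload -> Redis[str]

r_text.set("greeting", "hello")
raw = r_bytes.get("greeting")   # checker infers Optional[bytes]
text = r_text.get("greeting")   # checker infers Optional[str]

# Pipeline overrides each command's return type with Pipeline[_StrType],
# which is what lets this kind of chaining type-check cleanly.
results = r_text.pipeline().set("counter", 1).incr("counter").execute()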


transformed_distribution.py

Source: transformed_distribution.py (GitHub)


...
      bijector = bijectors.Identity(validate_args=validate_args)
    # We will keep track of a static and dynamic version of
    # self._is_{batch,event}_override. This way we can do more prior to graph
    # execution, including possibly raising Python exceptions.
    self._override_batch_shape = self._maybe_validate_shape_override(
        batch_shape, distribution.is_scalar_batch(), validate_args,
        "batch_shape")
    self._is_batch_override = _logical_not(_logical_equal(
        _ndims_from_shape(self._override_batch_shape), self._zero))
    self._is_maybe_batch_override = bool(
        tensor_util.constant_value(self._override_batch_shape) is None or
        tensor_util.constant_value(self._override_batch_shape))
    self._override_event_shape = self._maybe_validate_shape_override(
        event_shape, distribution.is_scalar_event(), validate_args,
        "event_shape")
    self._is_event_override = _logical_not(_logical_equal(
        _ndims_from_shape(self._override_event_shape), self._zero))
    self._is_maybe_event_override = bool(
        tensor_util.constant_value(self._override_event_shape) is None or
        tensor_util.constant_value(self._override_event_shape))
    # To convert a scalar distribution into a multivariate distribution we
    # will draw dims from the sample dims, which are otherwise iid. This is
    # easy to do except in the case that the base distribution has batch dims
    # and we're overriding event shape. When that case happens the event dims
    # will incorrectly be to the left of the batch dims. In this case we'll
    # cyclically permute left the new dims.
    self._needs_rotation = _logical_and(
        self._is_event_override,
        _logical_not(self._is_batch_override),
        _logical_not(distribution.is_scalar_batch()))
    override_event_ndims = _ndims_from_shape(self._override_event_shape)
    self._rotate_ndims = _pick_scalar_condition(
        self._needs_rotation, override_event_ndims, 0)
    # We'll be reducing the head dims (if at all), i.e., this will be []
    # if we don't need to reduce.
    self._reduce_event_indices = math_ops.range(
        self._rotate_ndims - override_event_ndims, self._rotate_ndims)
    self._distribution = distribution
    self._bijector = bijector
    super(TransformedDistribution, self).__init__(
        dtype=self._distribution.dtype,
        is_continuous=self._distribution.is_continuous,
        is_reparameterized=self._distribution.is_reparameterized,
        validate_args=validate_args,
        allow_nan_stats=self._distribution.allow_nan_stats,
        parameters=parameters,
        # We let TransformedDistribution access _graph_parents since this class
        # is more like a baseclass than derived.
        graph_parents=(distribution._graph_parents +  # pylint: disable=protected-access
                       bijector.graph_parents),
        name=name)

  @property
  def distribution(self):
    """Base distribution, p(x)."""
    return self._distribution

  @property
  def bijector(self):
    """Function transforming x => y."""
    return self._bijector

  def _event_shape(self):
    return self.bijector.forward_event_shape(distribution_util.pick_vector(
        self._is_event_override,
        self._override_event_shape,
        self.distribution.event_shape()))

  def _get_event_shape(self):
    static_override = tensor_util.constant_value(self._override_event_shape)
    return self.bijector.get_forward_event_shape(
        self.distribution.get_event_shape()
        if static_override is not None and not static_override
        else tensor_shape.TensorShape(static_override))

  def _batch_shape(self):
    return distribution_util.pick_vector(
        self._is_batch_override,
        self._override_batch_shape,
        self.distribution.batch_shape())

  def _get_batch_shape(self):
    static_override = tensor_util.constant_value(self._override_batch_shape)
    if static_override is not None and not static_override:
      return self.distribution.get_batch_shape()
    return tensor_shape.TensorShape(static_override)

  @distribution_util.AppendDocstring(
      """Samples from the base distribution and then passes through
      the bijector's forward transform.""",
      condition_kwargs_dict=_condition_kwargs_dict)
  def _sample_n(self, n, seed=None,
                bijector_kwargs=None, distribution_kwargs=None):
    bijector_kwargs = bijector_kwargs or {}
    distribution_kwargs = distribution_kwargs or {}
    sample_shape = _concat_vectors(
        distribution_util.pick_vector(self._needs_rotation, self._empty, [n]),
        self._override_batch_shape,
        self._override_event_shape,
        distribution_util.pick_vector(self._needs_rotation, [n], self._empty))
    x = self.distribution.sample(sample_shape=sample_shape, seed=seed,
                                 **distribution_kwargs)
    x = self._maybe_rotate_dims(x)
    return self.bijector.forward(x, **bijector_kwargs)

  @distribution_util.AppendDocstring(
      """Implements `(log o p o g^{-1})(y) + (log o abs o det o J o g^{-1})(y)`,
      where `g^{-1}` is the inverse of `transform`.
      Also raises a `ValueError` if `inverse` was not provided to the
      distribution and `y` was not returned from `sample`.""",
      condition_kwargs_dict=_condition_kwargs_dict)
  def _log_prob(self, y, bijector_kwargs=None, distribution_kwargs=None):
    bijector_kwargs = bijector_kwargs or {}
    distribution_kwargs = distribution_kwargs or {}
    x, ildj = self.bijector.inverse_and_inverse_log_det_jacobian(
        y, **bijector_kwargs)
    x = self._maybe_rotate_dims(x, rotate_right=True)
    log_prob = self.distribution.log_prob(x, **distribution_kwargs)
    if self._is_maybe_event_override:
      log_prob = math_ops.reduce_sum(log_prob, self._reduce_event_indices)
    return ildj + log_prob

  @distribution_util.AppendDocstring(
      """Implements `p(g^{-1}(y)) det|J(g^{-1}(y))|`, where `g^{-1}` is the
      inverse of `transform`.
      Also raises a `ValueError` if `inverse` was not provided to the
      distribution and `y` was not returned from `sample`.""",
      condition_kwargs_dict=_condition_kwargs_dict)
  def _prob(self, y, bijector_kwargs=None, distribution_kwargs=None):
    bijector_kwargs = bijector_kwargs or {}
    distribution_kwargs = distribution_kwargs or {}
    x, ildj = self.bijector.inverse_and_inverse_log_det_jacobian(
        y, **bijector_kwargs)
    x = self._maybe_rotate_dims(x, rotate_right=True)
    prob = self.distribution.prob(x, **distribution_kwargs)
    if self._is_maybe_event_override:
      prob = math_ops.reduce_prod(prob, self._reduce_event_indices)
    return math_ops.exp(ildj) * prob

  @distribution_util.AppendDocstring(
      condition_kwargs_dict=_condition_kwargs_dict)
  def _log_cdf(self, y, bijector_kwargs=None, distribution_kwargs=None):
    if self._is_maybe_event_override:
      raise NotImplementedError("log_cdf is not implemented when overriding "
                                "event_shape")
    bijector_kwargs = bijector_kwargs or {}
    distribution_kwargs = distribution_kwargs or {}
    x = self.bijector.inverse(y, **bijector_kwargs)
    return self.distribution.log_cdf(x, **distribution_kwargs)

  @distribution_util.AppendDocstring(
      condition_kwargs_dict=_condition_kwargs_dict)
  def _cdf(self, y, bijector_kwargs=None, distribution_kwargs=None):
    if self._is_maybe_event_override:
      raise NotImplementedError("cdf is not implemented when overriding "
                                "event_shape")
    bijector_kwargs = bijector_kwargs or {}
    distribution_kwargs = distribution_kwargs or {}
    x = self.bijector.inverse(y, **bijector_kwargs)
    return self.distribution.cdf(x, **distribution_kwargs)

  @distribution_util.AppendDocstring(
      condition_kwargs_dict=_condition_kwargs_dict)
  def _log_survival_function(self, y,
                             bijector_kwargs=None, distribution_kwargs=None):
    if self._is_maybe_event_override:
      raise NotImplementedError("log_survival_function is not implemented when "
                                "overriding event_shape")
    bijector_kwargs = bijector_kwargs or {}
    distribution_kwargs = distribution_kwargs or {}
    x = self.bijector.inverse(y, **bijector_kwargs)
    return self.distribution.log_survival_function(x, **distribution_kwargs)

  @distribution_util.AppendDocstring(
      condition_kwargs_dict=_condition_kwargs_dict)
  def _survival_function(self, y,
                         bijector_kwargs=None, distribution_kwargs=None):
    if self._is_maybe_event_override:
      raise NotImplementedError("survival_function is not implemented when "
                                "overriding event_shape")
    bijector_kwargs = bijector_kwargs or {}
    distribution_kwargs = distribution_kwargs or {}
    x = self.bijector.inverse(y, **bijector_kwargs)
    return self.distribution.survival_function(x, **distribution_kwargs)

  def _entropy(self):
    if (not self.distribution.is_continuous or
        not self.bijector.is_constant_jacobian):
      raise NotImplementedError("entropy is not implemented")
    # Suppose Y = g(X) where g is a diffeomorphism and X is a continuous rv. It
    # can be shown that:
    # H[Y] = H[X] + E_X[(log o abs o det o J o g)(X)].
    # If is_constant_jacobian then:
    # E_X[(log o abs o det o J o g)(X)] = (log o abs o det o J o g)(c)
    # where c can by anything.
    entropy = self.distribution.entropy()
    if self._is_maybe_event_override:
      # H[X] = sum_i H[X_i] if X_i are mutually independent.
      # This means that a reduce_sum is a simple rescaling.
      entropy *= math_ops.cast(math_ops.reduce_prod(self._override_event_shape),
                               dtype=entropy.dtype.base_dtype)
    if self._is_maybe_batch_override:
      new_shape = array_ops.concat([
          _ones_like(self._override_batch_shape),
          self.distribution.batch_shape()
      ], 0)
      entropy = array_ops.reshape(entropy, new_shape)
      multiples = array_ops.concat([
          self._override_batch_shape,
          _ones_like(self.distribution.batch_shape())
      ], 0)
      entropy = array_ops.tile(entropy, multiples)
    dummy = 0.
    return entropy - self.bijector.inverse_log_det_jacobian(dummy)

  def _maybe_validate_shape_override(self, override_shape, base_is_scalar,
                                     validate_args, name):
    """Helper to __init__ which ensures override batch/event_shape are valid."""
    if override_shape is None:
      override_shape = []
    override_shape = ops.convert_to_tensor(override_shape, dtype=dtypes.int32,
                                           name=name)
    if not override_shape.dtype.is_integer:
      raise TypeError("shape override must be an integer")
    override_is_scalar = _is_scalar_from_shape(override_shape)
    if tensor_util.constant_value(override_is_scalar):
      return self._empty
    dynamic_assertions = []
    if override_shape.get_shape().ndims is not None:
      if override_shape.get_shape().ndims != 1:
...


tests.py

Source: tests.py (GitHub)


...20@override_settings(ITEMS=['a', 'c', 'e'], ITEMS_OUTER=[1, 2, 3],21 TEST='override', TEST_OUTER='outer')22class FullyDecoratedTranTestCase(TransactionTestCase):23 available_apps = []24 def test_override(self):25 self.assertListEqual(settings.ITEMS, ['b', 'c', 'd'])26 self.assertListEqual(settings.ITEMS_OUTER, [1, 2, 3])27 self.assertEqual(settings.TEST, 'override')28 self.assertEqual(settings.TEST_OUTER, 'outer')29 @modify_settings(ITEMS={30 'append': ['e', 'f'],31 'prepend': ['a'],32 'remove': ['d', 'c'],33 })34 def test_method_list_override(self):35 self.assertListEqual(settings.ITEMS, ['a', 'b', 'e', 'f'])36 self.assertListEqual(settings.ITEMS_OUTER, [1, 2, 3])37 @modify_settings(ITEMS={38 'append': ['b'],39 'prepend': ['d'],40 'remove': ['a', 'c', 'e'],41 })42 def test_method_list_override_no_ops(self):43 self.assertListEqual(settings.ITEMS, ['b', 'd'])44 @modify_settings(ITEMS={45 'append': 'e',46 'prepend': 'a',47 'remove': 'c',48 })49 def test_method_list_override_strings(self):50 self.assertListEqual(settings.ITEMS, ['a', 'b', 'd', 'e'])51 @modify_settings(ITEMS={'remove': ['b', 'd']})52 @modify_settings(ITEMS={'append': ['b'], 'prepend': ['d']})53 def test_method_list_override_nested_order(self):54 self.assertListEqual(settings.ITEMS, ['d', 'c', 'b'])55 @override_settings(TEST='override2')56 def test_method_override(self):57 self.assertEqual(settings.TEST, 'override2')58 self.assertEqual(settings.TEST_OUTER, 'outer')59 def test_decorated_testcase_name(self):60 self.assertEqual(FullyDecoratedTranTestCase.__name__, 'FullyDecoratedTranTestCase')61 def test_decorated_testcase_module(self):62 self.assertEqual(FullyDecoratedTranTestCase.__module__, __name__)63@modify_settings(ITEMS={64 'prepend': ['b'],65 'append': ['d'],66 'remove': ['a', 'e']67})68@override_settings(ITEMS=['a', 'c', 'e'], TEST='override')69class FullyDecoratedTestCase(TestCase):70 def test_override(self):71 self.assertListEqual(settings.ITEMS, ['b', 'c', 'd'])72 self.assertEqual(settings.TEST, 'override')73 @modify_settings(ITEMS={74 'append': 'e',75 'prepend': 'a',76 'remove': 'c',77 })78 @override_settings(TEST='override2')79 def test_method_override(self):80 self.assertListEqual(settings.ITEMS, ['a', 'b', 'd', 'e'])81 self.assertEqual(settings.TEST, 'override2')82class ClassDecoratedTestCaseSuper(TestCase):83 """84 Dummy class for testing max recursion error in child class call to85 super(). Refs #17011.86 """87 def test_max_recursion_error(self):88 pass89@override_settings(TEST='override')90class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper):91 @classmethod92 def setUpClass(cls):93 super(ClassDecoratedTestCase, cls).setUpClass()94 cls.foo = getattr(settings, 'TEST', 'BUG')95 def test_override(self):96 self.assertEqual(settings.TEST, 'override')97 def test_setupclass_override(self):98 """Test that settings are overridden within setUpClass -- refs #21281"""99 self.assertEqual(self.foo, 'override')100 @override_settings(TEST='override2')101 def test_method_override(self):102 self.assertEqual(settings.TEST, 'override2')103 def test_max_recursion_error(self):104 """105 Overriding a method on a super class and then calling that method on106 the super class should not trigger infinite recursion. 
See #17011.107 """108 try:109 super(ClassDecoratedTestCase, self).test_max_recursion_error()110 except RuntimeError:111 self.fail()112@modify_settings(ITEMS={'append': 'mother'})113@override_settings(ITEMS=['father'], TEST='override-parent')114class ParentDecoratedTestCase(TestCase):115 pass116@modify_settings(ITEMS={'append': ['child']})117@override_settings(TEST='override-child')118class ChildDecoratedTestCase(ParentDecoratedTestCase):119 def test_override_settings_inheritance(self):120 self.assertEqual(settings.ITEMS, ['father', 'mother', 'child'])121 self.assertEqual(settings.TEST, 'override-child')122class SettingsTests(SimpleTestCase):123 def setUp(self):124 self.testvalue = None125 signals.setting_changed.connect(self.signal_callback)126 def tearDown(self):127 signals.setting_changed.disconnect(self.signal_callback)128 def signal_callback(self, sender, setting, value, **kwargs):129 if setting == 'TEST':130 self.testvalue = value131 def test_override(self):132 settings.TEST = 'test'133 self.assertEqual('test', settings.TEST)134 with self.settings(TEST='override'):135 self.assertEqual('override', settings.TEST)136 self.assertEqual('test', settings.TEST)137 del settings.TEST138 def test_override_change(self):139 settings.TEST = 'test'140 self.assertEqual('test', settings.TEST)141 with self.settings(TEST='override'):142 self.assertEqual('override', settings.TEST)143 settings.TEST = 'test2'144 self.assertEqual('test', settings.TEST)145 del settings.TEST146 def test_override_doesnt_leak(self):147 self.assertRaises(AttributeError, getattr, settings, 'TEST')148 with self.settings(TEST='override'):149 self.assertEqual('override', settings.TEST)150 settings.TEST = 'test'151 self.assertRaises(AttributeError, getattr, settings, 'TEST')152 @override_settings(TEST='override')153 def test_decorator(self):154 self.assertEqual('override', settings.TEST)155 def test_context_manager(self):156 self.assertRaises(AttributeError, getattr, settings, 'TEST')157 override = override_settings(TEST='override')158 self.assertRaises(AttributeError, getattr, settings, 'TEST')159 override.enable()160 self.assertEqual('override', settings.TEST)161 override.disable()162 self.assertRaises(AttributeError, getattr, settings, 'TEST')163 def test_class_decorator(self):164 # SimpleTestCase can be decorated by override_settings, but not ut.TestCase165 class SimpleTestCaseSubclass(SimpleTestCase):166 pass167 class UnittestTestCaseSubclass(unittest.TestCase):168 pass169 decorated = override_settings(TEST='override')(SimpleTestCaseSubclass)170 self.assertIsInstance(decorated, type)171 self.assertTrue(issubclass(decorated, SimpleTestCase))172 with six.assertRaisesRegex(self, Exception,173 "Only subclasses of Django SimpleTestCase*"):174 decorated = override_settings(TEST='override')(UnittestTestCaseSubclass)175 def test_signal_callback_context_manager(self):176 self.assertRaises(AttributeError, getattr, settings, 'TEST')177 with self.settings(TEST='override'):178 self.assertEqual(self.testvalue, 'override')179 self.assertEqual(self.testvalue, None)180 @override_settings(TEST='override')181 def test_signal_callback_decorator(self):182 self.assertEqual(self.testvalue, 'override')183 #184 # Regression tests for #10130: deleting settings.185 #186 def test_settings_delete(self):187 settings.TEST = 'test'188 self.assertEqual('test', settings.TEST)189 del settings.TEST190 self.assertRaises(AttributeError, getattr, settings, 'TEST')191 def test_settings_delete_wrapped(self):192 self.assertRaises(TypeError, delattr, settings, 
'_wrapped')193 def test_override_settings_delete(self):194 """195 Allow deletion of a setting in an overridden settings set (#18824)196 """197 previous_i18n = settings.USE_I18N198 previous_l10n = settings.USE_L10N199 with self.settings(USE_I18N=False):200 del settings.USE_I18N201 self.assertRaises(AttributeError, getattr, settings, 'USE_I18N')202 # Should also work for a non-overridden setting203 del settings.USE_L10N204 self.assertRaises(AttributeError, getattr, settings, 'USE_L10N')205 self.assertEqual(settings.USE_I18N, previous_i18n)206 self.assertEqual(settings.USE_L10N, previous_l10n)207 def test_override_settings_nested(self):208 """209 Test that override_settings uses the actual _wrapped attribute at210 runtime, not when it was instantiated.211 """212 self.assertRaises(AttributeError, getattr, settings, 'TEST')213 self.assertRaises(AttributeError, getattr, settings, 'TEST2')214 inner = override_settings(TEST2='override')215 with override_settings(TEST='override'):216 self.assertEqual('override', settings.TEST)217 with inner:218 self.assertEqual('override', settings.TEST)219 self.assertEqual('override', settings.TEST2)220 # inner's __exit__ should have restored the settings of the outer221 # context manager, not those when the class was instantiated222 self.assertEqual('override', settings.TEST)223 self.assertRaises(AttributeError, getattr, settings, 'TEST2')224 self.assertRaises(AttributeError, getattr, settings, 'TEST')225 self.assertRaises(AttributeError, getattr, settings, 'TEST2')226class TestComplexSettingOverride(SimpleTestCase):227 def setUp(self):228 self.old_warn_override_settings = signals.COMPLEX_OVERRIDE_SETTINGS.copy()229 signals.COMPLEX_OVERRIDE_SETTINGS.add('TEST_WARN')230 def tearDown(self):231 signals.COMPLEX_OVERRIDE_SETTINGS = self.old_warn_override_settings232 self.assertNotIn('TEST_WARN', signals.COMPLEX_OVERRIDE_SETTINGS)233 def test_complex_override_warning(self):234 """Regression test for #19031"""235 with warnings.catch_warnings(record=True) as w:236 warnings.simplefilter("always")237 with override_settings(TEST_WARN='override'):238 self.assertEqual(settings.TEST_WARN, 'override')239 self.assertEqual(len(w), 1)240 # File extension may by .py, .pyc, etc. 
Compare only basename.241 self.assertEqual(os.path.splitext(w[0].filename)[0],242 os.path.splitext(__file__)[0])243 self.assertEqual(str(w[0].message),244 'Overriding setting TEST_WARN can lead to unexpected behavior.')245class TrailingSlashURLTests(SimpleTestCase):246 """247 Tests for the MEDIA_URL and STATIC_URL settings.248 They must end with a slash to ensure there's a deterministic way to build249 paths in templates.250 """251 settings_module = settings252 def setUp(self):253 self._original_media_url = self.settings_module.MEDIA_URL254 self._original_static_url = self.settings_module.STATIC_URL255 def tearDown(self):256 self.settings_module.MEDIA_URL = self._original_media_url257 self.settings_module.STATIC_URL = self._original_static_url258 def test_blank(self):259 """260 The empty string is accepted, even though it doesn't end in a slash.261 """262 self.settings_module.MEDIA_URL = ''263 self.assertEqual('', self.settings_module.MEDIA_URL)264 self.settings_module.STATIC_URL = ''265 self.assertEqual('', self.settings_module.STATIC_URL)266 def test_end_slash(self):267 """268 It works if the value ends in a slash.269 """270 self.settings_module.MEDIA_URL = '/foo/'271 self.assertEqual('/foo/', self.settings_module.MEDIA_URL)272 self.settings_module.MEDIA_URL = 'http://media.foo.com/'273 self.assertEqual('http://media.foo.com/',274 self.settings_module.MEDIA_URL)275 self.settings_module.STATIC_URL = '/foo/'276 self.assertEqual('/foo/', self.settings_module.STATIC_URL)277 self.settings_module.STATIC_URL = 'http://static.foo.com/'278 self.assertEqual('http://static.foo.com/',279 self.settings_module.STATIC_URL)280 def test_no_end_slash(self):281 """282 An ImproperlyConfigured exception is raised if the value doesn't end283 in a slash.284 """285 with self.assertRaises(ImproperlyConfigured):286 self.settings_module.MEDIA_URL = '/foo'287 with self.assertRaises(ImproperlyConfigured):288 self.settings_module.MEDIA_URL = 'http://media.foo.com'289 with self.assertRaises(ImproperlyConfigured):290 self.settings_module.STATIC_URL = '/foo'291 with self.assertRaises(ImproperlyConfigured):292 self.settings_module.STATIC_URL = 'http://static.foo.com'293 def test_double_slash(self):294 """295 If the value ends in more than one slash, presume they know what296 they're doing.297 """298 self.settings_module.MEDIA_URL = '/wrong//'299 self.assertEqual('/wrong//', self.settings_module.MEDIA_URL)300 self.settings_module.MEDIA_URL = 'http://media.foo.com/wrong//'301 self.assertEqual('http://media.foo.com/wrong//',302 self.settings_module.MEDIA_URL)303 self.settings_module.STATIC_URL = '/wrong//'304 self.assertEqual('/wrong//', self.settings_module.STATIC_URL)305 self.settings_module.STATIC_URL = 'http://static.foo.com/wrong//'306 self.assertEqual('http://static.foo.com/wrong//',307 self.settings_module.STATIC_URL)308class SecureProxySslHeaderTest(SimpleTestCase):309 settings_module = settings310 def setUp(self):311 self._original_setting = self.settings_module.SECURE_PROXY_SSL_HEADER312 def tearDown(self):313 self.settings_module.SECURE_PROXY_SSL_HEADER = self._original_setting314 def test_none(self):315 self.settings_module.SECURE_PROXY_SSL_HEADER = None316 req = HttpRequest()317 self.assertEqual(req.is_secure(), False)318 def test_set_without_xheader(self):319 self.settings_module.SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https')320 req = HttpRequest()321 self.assertEqual(req.is_secure(), False)322 def test_set_with_xheader_wrong(self):323 self.settings_module.SECURE_PROXY_SSL_HEADER = 
('HTTP_X_FORWARDED_PROTOCOL', 'https')324 req = HttpRequest()325 req.META['HTTP_X_FORWARDED_PROTOCOL'] = 'wrongvalue'326 self.assertEqual(req.is_secure(), False)327 def test_set_with_xheader_right(self):328 self.settings_module.SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https')329 req = HttpRequest()330 req.META['HTTP_X_FORWARDED_PROTOCOL'] = 'https'331 self.assertEqual(req.is_secure(), True)332class IsOverriddenTest(SimpleTestCase):333 def test_configure(self):334 s = LazySettings()335 s.configure(SECRET_KEY='foo')336 self.assertTrue(s.is_overridden('SECRET_KEY'))337 def test_module(self):338 settings_module = ModuleType('fake_settings_module')339 settings_module.SECRET_KEY = 'foo'340 sys.modules['fake_settings_module'] = settings_module341 try:342 s = Settings('fake_settings_module')343 self.assertTrue(s.is_overridden('SECRET_KEY'))344 self.assertFalse(s.is_overridden('ALLOWED_HOSTS'))345 finally:346 del sys.modules['fake_settings_module']347 def test_override(self):348 self.assertFalse(settings.is_overridden('ALLOWED_HOSTS'))349 with override_settings(ALLOWED_HOSTS=[]):350 self.assertTrue(settings.is_overridden('ALLOWED_HOSTS'))351 def test_unevaluated_lazysettings_repr(self):352 lazy_settings = LazySettings()353 expected = '<LazySettings [Unevaluated]>'354 self.assertEqual(repr(lazy_settings), expected)355 def test_evaluated_lazysettings_repr(self):356 lazy_settings = LazySettings()357 module = os.environ.get(ENVIRONMENT_VARIABLE)358 expected = '<LazySettings "%s">' % module359 # Force evaluation of the lazy object.360 lazy_settings.APPEND_SLASH361 self.assertEqual(repr(lazy_settings), expected)...


runner.py

Source: runner.py (GitHub)


...3from runner_helper import Arch, RunConfig, ConfigList, App, Dataset, CachePolicy, TMP_LOG_DIR, run_in_list, SampleType, percent_gen4do_mock = False5durable_log = True6cur_common_base = (ConfigList()7 .override('copy_job', [1])8 .override('sample_job', [1])9 .override('pipeline', [True])10 .override('epoch', [3])11 .override('num_sampler', [2])12 .override('num_trainer', [6])13 .override('logdir', ['run-logs',])14 .override('profile_level', [3])15 .override('log_level', ['error'])16 .override('multi_gpu', [True])17 .override('cache_policy', [18 # CachePolicy.cache_by_random,19 CachePolicy.cache_by_degree,20 # CachePolicy.cache_by_presample_1,21 ]))22cfg_list_collector = ConfigList.Empty()23# cur_common_base = (cur_common_base.copy().override('app', [App.gcn ]).override('sample_type', [SampleType.kKHop2]))24# cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(21, 21, 1)))25# cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(19, 19, 1)))26# cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.uk_2006_05, ]).override('cache_percent', percent_gen(10, 10, 1)))27# cur_common_base = (cur_common_base.copy().override('app', [App.graphsage ]).override('sample_type', [SampleType.kKHop2]))28# cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(29, 29, 1)))29# cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(24, 24, 1)))30# cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.uk_2006_05, ]).override('cache_percent', percent_gen(15, 15, 1)).override('num_sampler', [1]).override('num_trainer', [7]))31# cur_common_base = (cur_common_base.copy().override('app', [App.pinsage ]).override('sample_type', [SampleType.kRandomWalk]))32# cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(23, 23, 1)).override('num_sampler', [1]).override('num_trainer', [7]))33# cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(21, 21, 1)).override('num_sampler', [1]).override('num_trainer', [7]))34# cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.uk_2006_05, ]).override('cache_percent', percent_gen( 7, 7, 1)).override('num_sampler', [1]).override('num_trainer', [7]))35# cur_common_base = (cur_common_base.copy().override('app', [App.gcn ]).override('sample_type', [SampleType.kWeightedKHopPrefix]))36# cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(24, 24, 1)))37# cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(21, 21, 1)))38cur_common_base = (cur_common_base.copy().override('cache_policy', [CachePolicy.cache_by_presample_1]))39cur_common_base = (cur_common_base.copy().override('app', [App.gcn ]).override('sample_type', [SampleType.kKHop2]))40cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(22, 22, 1)))41cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(20, 20, 
1)))42cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.uk_2006_05, ]).override('cache_percent', percent_gen(11, 11, 1)))43cur_common_base = (cur_common_base.copy().override('app', [App.graphsage ]).override('sample_type', [SampleType.kKHop2]))44cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(31, 31, 1)))45cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(24, 24, 1)))46cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.uk_2006_05, ]).override('cache_percent', percent_gen(16, 16, 1)).override('num_sampler', [1]).override('num_trainer', [7]))47cur_common_base = (cur_common_base.copy().override('app', [App.pinsage ]).override('sample_type', [SampleType.kRandomWalk]))48cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(25, 25, 1)).override('num_sampler', [1]).override('num_trainer', [7]))49cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(21, 21, 1)).override('num_sampler', [1]).override('num_trainer', [7]))50cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.uk_2006_05, ]).override('cache_percent', percent_gen( 9, 9, 1)).override('num_sampler', [1]).override('num_trainer', [7]))51cur_common_base = (cur_common_base.copy().override('app', [App.gcn ]).override('sample_type', [SampleType.kWeightedKHopPrefix]))52cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(24, 24, 1)))53cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(21, 21, 1)))54cur_common_base = (cur_common_base.copy().override('cache_policy', [CachePolicy.cache_by_degree]))55cur_common_base = (cur_common_base.copy().override('app', [App.gcn ]).override('sample_type', [SampleType.kKHop2]))56cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(22, 22, 1)))57cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(20, 20, 1)))58cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.uk_2006_05, ]).override('cache_percent', percent_gen(11, 11, 1)))59cur_common_base = (cur_common_base.copy().override('app', [App.graphsage ]).override('sample_type', [SampleType.kKHop2]))60cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(31, 31, 1)))61cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(24, 24, 1)))62cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.uk_2006_05, ]).override('cache_percent', percent_gen(16, 16, 1)).override('num_sampler', [1]).override('num_trainer', [7]))63cur_common_base = (cur_common_base.copy().override('app', [App.pinsage ]).override('sample_type', [SampleType.kRandomWalk]))64cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(25, 25, 1)).override('num_sampler', [1]).override('num_trainer', 
[7]))65cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(21, 21, 1)).override('num_sampler', [1]).override('num_trainer', [7]))66cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.uk_2006_05, ]).override('cache_percent', percent_gen( 9, 9, 1)).override('num_sampler', [1]).override('num_trainer', [7]))67cur_common_base = (cur_common_base.copy().override('app', [App.gcn ]).override('sample_type', [SampleType.kWeightedKHopPrefix]))68cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(24, 24, 1)))69cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(21, 21, 1)))70cur_common_base = (cur_common_base.copy().override('cache_policy', [CachePolicy.cache_by_random]))71cur_common_base = (cur_common_base.copy().override('app', [App.gcn ]).override('sample_type', [SampleType.kKHop2]))72cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(21, 21, 1)))73cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(18, 18, 1)))74cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.uk_2006_05, ]).override('cache_percent', percent_gen(10, 10, 1)))75cur_common_base = (cur_common_base.copy().override('app', [App.graphsage ]).override('sample_type', [SampleType.kKHop2]))76cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(29, 29, 1)))77cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(24, 24, 1)))78cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.uk_2006_05, ]).override('cache_percent', percent_gen(15, 15, 1)).override('num_sampler', [1]).override('num_trainer', [7]))79cur_common_base = (cur_common_base.copy().override('app', [App.pinsage ]).override('sample_type', [SampleType.kRandomWalk]))80cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(20, 20, 1)).override('num_sampler', [1]).override('num_trainer', [7]))81cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(20, 20, 1)).override('num_sampler', [1]).override('num_trainer', [7]))82cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.uk_2006_05, ]).override('cache_percent', percent_gen( 7, 7, 1)).override('num_sampler', [1]).override('num_trainer', [7]))83cur_common_base = (cur_common_base.copy().override('app', [App.gcn ]).override('sample_type', [SampleType.kWeightedKHopPrefix]))84cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.twitter, ]).override('cache_percent', percent_gen(24, 24, 1)))85cfg_list_collector.concat(cur_common_base.copy().override('dataset', [Dataset.papers100M, ]).override('cache_percent', percent_gen(21, 21, 1)))86if __name__ == '__main__':87 from sys import argv88 for arg in argv[1:]:89 if arg == '-m' or arg == '--mock':90 do_mock = True91 elif arg == '-i' or arg == '--interactive':92 durable_log = False...


bl_blendfile_library_overrides.py

Source: bl_blendfile_library_overrides.py (GitHub)


1# Apache License, Version 2.02# ./blender.bin --background -noaudio --python tests/python/bl_blendfile_library_overrides.py -- --output-dir=/tmp/3import pathlib4import bpy5import sys6import os7import unittest8sys.path.append(os.path.dirname(os.path.realpath(__file__)))9from bl_blendfile_utils import TestHelper10class TestLibraryOverrides(TestHelper, unittest.TestCase):11 MESH_LIBRARY_PARENT = "LibMeshParent"12 OBJECT_LIBRARY_PARENT = "LibMeshParent"13 MESH_LIBRARY_CHILD = "LibMeshChild"14 OBJECT_LIBRARY_CHILD = "LibMeshChild"15 MESH_LIBRARY_PERMISSIVE = "LibMeshPermissive"16 OBJECT_LIBRARY_PERMISSIVE = "LibMeshPermissive"17 def __init__(self, args):18 self.args = args19 output_dir = pathlib.Path(self.args.output_dir)20 self.ensure_path(str(output_dir))21 self.output_path = output_dir / "blendlib_overrides.blend"22 self.test_output_path = output_dir / "blendlib_overrides_test.blend"23 bpy.ops.wm.read_homefile(use_empty=True, use_factory_startup=True)24 mesh = bpy.data.meshes.new(TestLibraryOverrides.MESH_LIBRARY_PARENT)25 obj = bpy.data.objects.new(TestLibraryOverrides.OBJECT_LIBRARY_PARENT, object_data=mesh)26 bpy.context.collection.objects.link(obj)27 mesh_child = bpy.data.meshes.new(TestLibraryOverrides.MESH_LIBRARY_CHILD)28 obj_child = bpy.data.objects.new(TestLibraryOverrides.OBJECT_LIBRARY_CHILD, object_data=mesh_child)29 obj_child.parent = obj30 bpy.context.collection.objects.link(obj_child)31 mesh = bpy.data.meshes.new(TestLibraryOverrides.MESH_LIBRARY_PERMISSIVE)32 obj = bpy.data.objects.new(TestLibraryOverrides.OBJECT_LIBRARY_PERMISSIVE, object_data=mesh)33 bpy.context.collection.objects.link(obj)34 obj.override_template_create()35 prop = obj.override_library.properties.add(rna_path='scale')36 prop.operations.add(operation='NOOP')37 bpy.ops.wm.save_as_mainfile(filepath=str(self.output_path), check_existing=False, compress=False)38 def test_link_and_override_property(self):39 bpy.ops.wm.read_homefile(use_empty=True, use_factory_startup=True)40 bpy.data.orphans_purge()41 link_dir = self.output_path / "Object"42 bpy.ops.wm.link(directory=str(link_dir), filename=TestLibraryOverrides.OBJECT_LIBRARY_PARENT)43 obj = bpy.data.objects[TestLibraryOverrides.OBJECT_LIBRARY_PARENT]44 self.assertIsNone(obj.override_library)45 local_id = obj.override_create()46 self.assertIsNotNone(local_id.override_library)47 self.assertIsNone(local_id.data.override_library)48 assert(len(local_id.override_library.properties) == 0)49 ##### Generate an override property & operation automaticaly by editing the local override data.50 local_id.location.y = 1.051 local_id.override_library.operations_update()52 assert(len(local_id.override_library.properties) == 1)53 override_prop = local_id.override_library.properties[0]54 assert(override_prop.rna_path == "location")55 assert(len(override_prop.operations) == 1)56 override_operation = override_prop.operations[0]57 assert(override_operation.operation == 'REPLACE')58 # Setting location.y overridded all elements in the location array. 
-1 is a wildcard.59 assert(override_operation.subitem_local_index == -1)60 ##### Reset the override to its linked reference data.61 local_id.override_library.reset()62 assert(len(local_id.override_library.properties) == 0)63 assert(local_id.location == local_id.override_library.reference.location)64 ##### Generate an override property & operation manually using the API.65 override_property = local_id.override_library.properties.add(rna_path="location")66 override_property.operations.add(operation='REPLACE')67 assert(len(local_id.override_library.properties) == 1)68 override_prop = local_id.override_library.properties[0]69 assert(override_prop.rna_path == "location")70 assert(len(override_prop.operations) == 1)71 override_operation = override_prop.operations[0]72 assert(override_operation.operation == 'REPLACE')73 # Setting location.y overridded all elements in the location array. -1 is a wildcard.74 assert(override_operation.subitem_local_index == -1)75 override_property = local_id.override_library.properties[0]76 override_property.operations.remove(override_property.operations[0])77 local_id.override_library.properties.remove(override_property)78 assert(len(local_id.override_library.properties) == 0)79 ##### Delete the override.80 local_id_name = local_id.name81 assert(bpy.data.objects.get((local_id_name, None), None) == local_id)82 local_id.override_library.destroy()83 assert(bpy.data.objects.get((local_id_name, None), None) == None)84 def test_link_permissive(self):85 """86 Linked assets with a permissive template.87 - Checks if the NOOP is properly handled.88 - Checks if the correct properties and operations are created/updated.89 """90 bpy.ops.wm.read_homefile(use_empty=True, use_factory_startup=True)91 bpy.data.orphans_purge()92 link_dir = self.output_path / "Object"93 bpy.ops.wm.link(directory=str(link_dir), filename=TestLibraryOverrides.OBJECT_LIBRARY_PERMISSIVE)94 obj = bpy.data.objects[TestLibraryOverrides.OBJECT_LIBRARY_PERMISSIVE]95 self.assertIsNotNone(obj.override_library)96 local_id = obj.override_create()97 self.assertIsNotNone(local_id.override_library)98 self.assertIsNone(local_id.data.override_library)99 assert(len(local_id.override_library.properties) == 1)100 override_prop = local_id.override_library.properties[0]101 assert(override_prop.rna_path == "scale")102 assert(len(override_prop.operations) == 1)103 override_operation = override_prop.operations[0]104 assert(override_operation.operation == 'NOOP')105 assert(override_operation.subitem_local_index == -1)106 local_id.location.y = 1.0107 local_id.scale.x = 0.5108 # `scale.x` will apply, but will be reverted when the library overrides109 # are updated. 
This is by design so python scripts can still alter the110 # properties locally what is a typical usecase in productions.111 assert(local_id.scale.x == 0.5)112 assert(local_id.location.y == 1.0)113 local_id.override_library.operations_update()114 assert(local_id.scale.x == 1.0)115 assert(local_id.location.y == 1.0)116 assert(len(local_id.override_library.properties) == 2)117 override_prop = local_id.override_library.properties[0]118 assert(override_prop.rna_path == "scale")119 assert(len(override_prop.operations) == 1)120 override_operation = override_prop.operations[0]121 assert(override_operation.operation == 'NOOP')122 assert(override_operation.subitem_local_index == -1)123 override_prop = local_id.override_library.properties[1]124 assert(override_prop.rna_path == "location")125 assert(len(override_prop.operations) == 1)126 override_operation = override_prop.operations[0]127 assert(override_operation.operation == 'REPLACE')128 assert (override_operation.subitem_local_index == -1)129class TestLibraryTemplate(TestHelper, unittest.TestCase):130 MESH_LIBRARY_PERMISSIVE = "LibMeshPermissive"131 OBJECT_LIBRARY_PERMISSIVE = "LibMeshPermissive"132 def __init__(self, args):133 pass134 def test_permissive_template(self):135 """136 Test setting up a permissive template.137 """138 bpy.ops.wm.read_homefile(use_empty=True, use_factory_startup=True)139 mesh = bpy.data.meshes.new(TestLibraryTemplate.MESH_LIBRARY_PERMISSIVE)140 obj = bpy.data.objects.new(TestLibraryTemplate.OBJECT_LIBRARY_PERMISSIVE, object_data=mesh)141 bpy.context.collection.objects.link(obj)142 assert(obj.override_library is None)143 obj.override_template_create()144 assert(obj.override_library is not None)145 assert(len(obj.override_library.properties) == 0)146 prop = obj.override_library.properties.add(rna_path='scale')147 assert(len(obj.override_library.properties) == 1)148 assert(len(prop.operations) == 0)149 operation = prop.operations.add(operation='NOOP')150 assert(len(prop.operations) == 1)151 assert(operation.operation == 'NOOP')152TESTS = (153 TestLibraryOverrides,154 TestLibraryTemplate,155)156def argparse_create():157 import argparse158 # When --help or no args are given, print this help159 description = "Test library overrides of blend file."160 parser = argparse.ArgumentParser(description=description)161 parser.add_argument(162 "--output-dir",163 dest="output_dir",164 default=".",165 help="Where to output temp saved blendfiles",166 required=False,167 )168 return parser169def main():170 args = argparse_create().parse_args()171 # Don't write thumbnails into the home directory.172 bpy.context.preferences.filepaths.file_preview_type = 'NONE'173 bpy.context.preferences.experimental.use_override_templates = True174 for Test in TESTS:175 Test(args).run_all_tests()176if __name__ == '__main__':177 import sys178 sys.argv = [__file__] + \179 (sys.argv[sys.argv.index("--") + 1:] if "--" in sys.argv else [])...


overridewidget.py

Source: overridewidget.py (GitHub)


...
            self.cmd.feedrate(data)
        elif self.override_type == 1:
            self.cmd.rapidrate(data)
        elif self.override_type == 2:
            self.cmd.spindleoverride(data)
        elif self.override_type == 3:
            self.cmd.maxvel(data)
        return True

    # This runs at the GObject timeout rate: it polls linuxcnc to get the
    # current data and updates the scale to reflect the current value.
    # In this way, if e.g. HALUI is used to set an override, the scale
    # will track it.
    def periodic(self):
        try:
            self.status.poll()
            if self.override_type == 0:
                self.override = self.status.feedrate
            elif self.override_type == 1:
...


manifest.py

Source: manifest.py (GitHub)


1# Copyright (c) 2010-2019 openpyxl2"""3File manifest4"""5from mimetypes import MimeTypes6import os.path7from openpyxl.descriptors.serialisable import Serialisable8from openpyxl.descriptors import String, Sequence9from openpyxl.xml.functions import fromstring10from openpyxl.xml.constants import (11 ARC_CORE,12 ARC_CONTENT_TYPES,13 ARC_WORKBOOK,14 ARC_APP,15 ARC_THEME,16 ARC_STYLE,17 ARC_SHARED_STRINGS,18 EXTERNAL_LINK,19 THEME_TYPE,20 STYLES_TYPE,21 XLSX,22 XLSM,23 XLTM,24 XLTX,25 WORKSHEET_TYPE,26 COMMENTS_TYPE,27 SHARED_STRINGS,28 DRAWING_TYPE,29 CHART_TYPE,30 CHARTSHAPE_TYPE,31 CHARTSHEET_TYPE,32 CONTYPES_NS,33 ACTIVEX,34 CTRL,35 VBA,36)37from openpyxl.xml.functions import tostring38# initialise mime-types39mimetypes = MimeTypes()40mimetypes.add_type('application/xml', ".xml")41mimetypes.add_type('application/vnd.openxmlformats-package.relationships+xml', ".rels")42mimetypes.add_type("application/vnd.ms-office.vbaProject", ".bin")43mimetypes.add_type("application/vnd.openxmlformats-officedocument.vmlDrawing", ".vml")44mimetypes.add_type("image/x-emf", ".emf")45class FileExtension(Serialisable):46 tagname = "Default"47 Extension = String()48 ContentType = String()49 def __init__(self, Extension, ContentType):50 self.Extension = Extension51 self.ContentType = ContentType52class Override(Serialisable):53 tagname = "Override"54 PartName = String()55 ContentType = String()56 def __init__(self, PartName, ContentType):57 self.PartName = PartName58 self.ContentType = ContentType59DEFAULT_TYPES = [60 FileExtension("rels", "application/vnd.openxmlformats-package.relationships+xml"),61 FileExtension("xml", "application/xml"),62]63DEFAULT_OVERRIDE = [64 Override("/" + ARC_STYLE, STYLES_TYPE), # Styles65 Override("/" + ARC_THEME, THEME_TYPE), # Theme66 Override("/docProps/core.xml", "application/vnd.openxmlformats-package.core-properties+xml"),67 Override("/docProps/app.xml", "application/vnd.openxmlformats-officedocument.extended-properties+xml")68]69class Manifest(Serialisable):70 tagname = "Types"71 Default = Sequence(expected_type=FileExtension, unique=True)72 Override = Sequence(expected_type=Override, unique=True)73 path = "[Content_Types].xml"74 __elements__ = ("Default", "Override")75 def __init__(self,76 Default=(),77 Override=(),78 ):79 if not Default:80 Default = DEFAULT_TYPES81 self.Default = Default82 if not Override:83 Override = DEFAULT_OVERRIDE84 self.Override = Override85 @property86 def filenames(self):87 return [part.PartName for part in self.Override]88 @property89 def extensions(self):90 """91 Map content types to file extensions92 Skip parts without extensions93 """94 exts = {os.path.splitext(part.PartName)[-1] for part in self.Override}95 return [(ext[1:], mimetypes.types_map[True][ext]) for ext in sorted(exts) if ext]96 def to_tree(self):97 """98 Custom serialisation method to allow setting a default namespace99 """100 defaults = [t.Extension for t in self.Default]101 for ext, mime in self.extensions:102 if ext not in defaults:103 mime = FileExtension(ext, mime)104 self.Default.append(mime)105 tree = super(Manifest, self).to_tree()106 tree.set("xmlns", CONTYPES_NS)107 return tree108 def __contains__(self, content_type):109 """110 Check whether a particular content type is contained111 """112 for t in self.Override:113 if t.ContentType == content_type:114 return True115 def find(self, content_type):116 """117 Find specific content-type118 """119 try:120 return next(self.findall(content_type))121 except StopIteration:122 return123 def findall(self, content_type):124 """125 Find 
all elements of a specific content-type126 """127 for t in self.Override:128 if t.ContentType == content_type:129 yield t130 def append(self, obj):131 """132 Add content object to the package manifest133 # needs a contract...134 """135 ct = Override(PartName=obj.path, ContentType=obj.mime_type)136 self.Override.append(ct)137 def _write(self, archive, workbook):138 """139 Write manifest to the archive140 """141 self.append(workbook)142 self._write_vba(workbook)143 self._register_mimetypes(filenames=archive.namelist())144 archive.writestr(self.path, tostring(self.to_tree()))145 def _register_mimetypes(self, filenames):146 """147 Make sure that the mime type for all file extensions is registered148 """149 for fn in filenames:150 ext = os.path.splitext(fn)[-1]151 if not ext:152 continue153 mime = mimetypes.types_map[True][ext]154 fe = FileExtension(ext[1:], mime)155 self.Default.append(fe)156 def _write_vba(self, workbook):157 """158 Add content types from cached workbook when keeping VBA159 """160 if workbook.vba_archive:161 node = fromstring(workbook.vba_archive.read(ARC_CONTENT_TYPES))162 mf = Manifest.from_tree(node)163 filenames = self.filenames164 for override in mf.Override:165 if override.PartName not in (ACTIVEX, CTRL, VBA):166 continue167 if override.PartName not in filenames:...


test_dnsoverride.py

Source: test_dnsoverride.py (GitHub)


from ..dnsoverride import DnsOverrideDescriptor

def test_dns_override_descriptor():
    d = DnsOverrideDescriptor('hostname', '1.2.3.4')
    assert d.hostname == 'hostname'
    assert d.ip_address == '1.2.3.4'
    assert str(d) == 'DNS override hostname -> 1.2.3.4'

def test_dns_override_descriptor_with_no_ip_address():
    d = DnsOverrideDescriptor('hostname')
    assert d.hostname == 'hostname'
    assert d.ip_address is None
    assert str(d) == 'DNS override hostname -> (nothing)'

def test_dns_override_descriptor_is_hashable():
    a = DnsOverrideDescriptor('hostname', '1.2.3.4')
    b = DnsOverrideDescriptor('hostname', '1.2.3.4')
    c = DnsOverrideDescriptor('hostname', '4.3.2.1')
    d = DnsOverrideDescriptor('hostname')
    assert hash(a) == hash(b)
    assert hash(a) != hash(c)
    assert hash(a) != hash(d)
    assert hash(d) == hash(d)

def test_dns_override_descriptor_has_equality_operator():
    a = DnsOverrideDescriptor('hostname', '1.2.3.4')
    b = DnsOverrideDescriptor('hostname', '1.2.3.4')
    c = DnsOverrideDescriptor('hostname', '4.3.2.1')
    d = DnsOverrideDescriptor('hostname')
    assert a == b
    assert a != c
    assert a != d
    assert d == d

def test_dns_override_descriptor_is_less_than_comparable():
    a = DnsOverrideDescriptor('a', '1.2.3.4')
    b = DnsOverrideDescriptor('b', '1.2.3.4')
    c = DnsOverrideDescriptor('b', '2.3.4.5')
    d = DnsOverrideDescriptor('b', '0.0.0.0')
    assert a < b
    assert b < c
...


Using AI Code Generation


// app.component.spec.ts
import { MockBuilder, MockRender } from 'ng-mocks';
import { AppModule } from './app.module';
import { AppComponent } from './app.component';

describe('AppComponent', () => {
  beforeEach(() => MockBuilder(AppComponent, AppModule));

  it('should create the app', () => {
    const fixture = MockRender(AppComponent);
    const app = fixture.debugElement.componentInstance;
    expect(app).toBeTruthy();
  });

  it(`should have as title 'app'`, () => {
    const fixture = MockRender(AppComponent);
    const app = fixture.debugElement.componentInstance;
    expect(app.title).toEqual('app');
  });

  it('should render title in a h1 tag', () => {
    const fixture = MockRender(AppComponent);
    fixture.detectChanges();
    const compiled = fixture.debugElement.nativeElement;
    expect(compiled.querySelector('h1').textContent).toContain('Welcome to app!');
  });
});

// app.module.ts (the decorator metadata is truncated in the original listing)
import { NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { AppComponent } from './app.component';

@NgModule({
  imports: [
})
export class AppModule { }

// app.component.ts (the decorator metadata is truncated in the original listing)
import { Component } from '@angular/core';

@Component({
})
export class AppComponent {
  title = 'app';
}

// app.component.html
Welcome to {{title}}!

// styles.css
/* You can add global styles to this file, and also import other style files */

// app.component.spec.ts, second variant (cut off in the original listing)
import { MockBuilder, MockRender, MockInstance } from 'ng-mocks';
import { AppModule } from './app.module';
import { AppComponent } from './app.component';
import { MyService } from './my.service';

describe('AppComponent', () => {
  beforeEach(() => MockBuilder(AppComponent, AppModule));

  it('should create the app', () => {
    const fixture = MockRender(AppComponent);
    const app = fixture.debugElement.componentInstance;
    expect(app).toBeTruthy();
  });

  it(`should have as title 'app'`, () => {
    const fixture = MockRender(AppComponent);
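
Beyond mocking the whole module, MockBuilder can also override what a specific dependency returns. The sketch below reuses the AppComponent/AppModule pair from the snippet above; MyService is the './my.service' import from the second spec, and its getTitle method is an assumption made purely for illustration:

import { MockBuilder, MockRender } from 'ng-mocks';
import { AppModule } from './app.module';
import { AppComponent } from './app.component';
import { MyService } from './my.service'; // hypothetical dependency of AppComponent

describe('AppComponent with an overridden dependency', () => {
  // Mock everything in AppModule except AppComponent, and give the mocked
  // MyService a canned implementation instead of an empty stub.
  beforeEach(() =>
    MockBuilder(AppComponent, AppModule).mock(MyService, {
      getTitle: () => 'stubbed title', // assumed method, for illustration only
    })
  );

  it('renders against the stubbed dependency', () => {
    const fixture = MockRender(AppComponent);
    expect(fixture.point.componentInstance).toBeTruthy();
  });
});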


Using AI Code Generation


// app.component.spec.ts
import { MockBuilder, MockRender, MockInstance, MockReset, MockService } from 'ng-mocks';
import { AppComponent } from './app.component';
import { AppService } from './app.service';

describe('AppComponent', () => {
  beforeEach(() => MockBuilder(AppComponent));
  beforeEach(() => MockRender(AppComponent));
  beforeEach(() => MockInstance(AppService, 'get', () => 'overridden value'));

  it('should display overridden value', () => {
    expect(document.querySelector('h1').textContent).toEqual('overridden value');
  });

  afterEach(() => MockReset());
});

// app.service.ts
import { Injectable } from '@angular/core';

@Injectable()
export class AppService {
  get() {
    return 'real value';
  }
}

// app.component.ts (the decorator metadata is truncated in the original listing)
import { Component } from '@angular/core';
import { AppService } from './app.service';

@Component({
})
export class AppComponent {
  title = 'app';
  constructor(private appService: AppService) {
    this.title = this.appService.get();
  }
}

// app.component.html
<h1>{{title}}</h1>

// app.module.ts (the rest of the decorator metadata is truncated in the original listing)
import { NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { AppComponent } from './app.component';
import { AppService } from './app.service';

@NgModule({
  imports: [BrowserModule],
})
export class AppModule {}

// tsconfig fragments (cut off in the original listing)
{
  "compilerOptions": {
  },
}
{
  "compilerOptions": {
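
The same override can also be scoped to a single test instead of a beforeEach block. A minimal sketch, assuming AppComponent copies AppService.get() into an h1 exactly as in the files above; registering the MockInstance override before MockRender guarantees the component's constructor already sees the stub:

import { MockBuilder, MockInstance, MockRender, MockReset } from 'ng-mocks';
import { AppComponent } from './app.component';
import { AppService } from './app.service';

describe('AppComponent with a per-test override', () => {
  beforeEach(() => MockBuilder(AppComponent).mock(AppService));
  afterEach(() => MockReset());

  it('uses the overridden AppService.get', () => {
    // Override first, render second, so the constructor call to get()
    // already returns the stubbed value.
    MockInstance(AppService, 'get', () => 'overridden value');
    const fixture = MockRender(AppComponent);
    expect(fixture.nativeElement.querySelector('h1').textContent)
      .toContain('overridden value');
  });
});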


Using AI Code Generation


import { MockBuilder, MockRender, ngMocks } from 'ng-mocks';
// Note: the original listing repeats this import line more than a dozen times
// (one repetition pulls TestBed from 'ng-mocks'; TestBed actually comes from
// '@angular/core/testing') and is cut off here.
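
For context on how these three imports are usually combined in a spec, here is a minimal sketch; it assumes the AppComponent and AppService files from the earlier snippets and sticks to documented ng-mocks calls (MockBuilder, MockRender, ngMocks.findInstance):

import { MockBuilder, MockRender, ngMocks } from 'ng-mocks';
import { AppComponent } from './app.component';
import { AppService } from './app.service';

describe('AppComponent', () => {
  // Keep AppComponent real, mock its AppService dependency.
  beforeEach(() => MockBuilder(AppComponent).mock(AppService));

  it('exposes the mocked service to the test', () => {
    MockRender(AppComponent);
    // ngMocks.findInstance digs the injected (mocked) AppService out of the
    // rendered fixture so the test can stub or inspect it.
    const service = ngMocks.findInstance(AppService);
    expect(service).toBeDefined();
  });
});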


Using AI Code Generation


ngMocks.overrideComponent(MyComponent, {
  set: {
  },
});
// The original listing repeats this identical call a dozen more times,
// each with an empty `set` block.
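
The `set` block in the calls above follows the shape of Angular's MetadataOverride, the same structure TestBed.overrideComponent accepts. For reference, a minimal sketch using that documented TestBed API; MyComponent and the inline template are assumptions for illustration:

import { TestBed } from '@angular/core/testing';
import { MyComponent } from './my.component'; // hypothetical component under test

describe('MyComponent with an overridden template', () => {
  beforeEach(() => {
    TestBed.configureTestingModule({ declarations: [MyComponent] });
    // Replace the component's template for the duration of this spec.
    TestBed.overrideComponent(MyComponent, {
      set: { template: '<p>stub template</p>' },
    });
  });

  it('renders the overridden template', () => {
    const fixture = TestBed.createComponent(MyComponent);
    fixture.detectChanges();
    expect(fixture.nativeElement.textContent).toContain('stub template');
  });
});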


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run ng-mocks automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

