diff --git a/CHANGELOG.md b/CHANGELOG.md
index a11734b..f34770e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,20 @@
+## v0.9.29 (2026-01-14)
+
+### Fix
+
+- add timeout reset
+- improve requests and cooldown
+- correct playwright installation
+- correct dockerignore and entrypoint
+
+### Refactor
+
+- add ruff formatting
+- improve timeout logic
+- improve linking procedure
+- update dependencies
+- start improving requests
+
 ## v0.9.28 (2026-01-12)
 
 ### Fix
diff --git a/README.md b/README.md
index 805d6f3..c92010d 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 # Strain Authentication and Identification Methods - saim
 
-[![release: 0.9.28](https://img.shields.io/badge/rel-0.9.28-blue.svg?style=flat-square)](https://github.com/LeibnizDSMZ/saim.git)
+[![release: 0.9.29](https://img.shields.io/badge/rel-0.9.29-blue.svg?style=flat-square)](https://github.com/LeibnizDSMZ/saim.git)
 [![MIT LICENSE](https://img.shields.io/badge/License-MIT-brightgreen.svg?style=flat-square)](https://choosealicense.com/licenses/mit/)
 [![Documentation Status](https://img.shields.io/badge/docs-GitHub-blue.svg?style=flat-square)](https://LeibnizDSMZ.github.io/saim/)
 
diff --git a/lefthook.yml b/lefthook.yml
index 4e7ccea..6a744df 100644
--- a/lefthook.yml
+++ b/lefthook.yml
@@ -54,10 +54,14 @@ pre-commit:
     group:
       piped: true
      jobs:
-        - name: ruff
+        - name: ruff-check
           glob: "*.py"
           run: ruff check {staged_files} --fix
           stage_fixed: true
+        - name: ruff-format
+          glob: "*.py"
+          run: ruff format {staged_files}
+          stage_fixed: true
         - name: uv-export
           run: make runLock && git add \*/requirements\*\.txt
           stage_fixed: true
diff --git a/pyproject.toml b/pyproject.toml
index 3ecb2f8..f77e335 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "saim"
-version = "0.9.28"
+version = "0.9.29"
 description = """A library for identifying strains."""
 readme = "README.md"
 authors = [
@@ -28,7 +28,7 @@ dependencies = [
 
 [project.scripts]
 extract_ccno = "saim.designation.main:run"
-verify_cafi = "saim.culture_link.main:run"
+verify_links = "saim.culture_link.main:run"
 
 [tool.setuptools.packages.find]
 where = ["src"]
@@ -250,7 +250,7 @@ parallel = true
 
 [tool.coverage.report]
 show_missing = false
-fail_under = 50
+fail_under = 30
 
 [tool.vulture]
 paths = ["configs/dev/whitelist.py", "src"]
diff --git a/src/saim/culture_link/private/cached_session.py b/src/saim/culture_link/private/cached_session.py
index eb16a5a..9d2b29f 100644
--- a/src/saim/culture_link/private/cached_session.py
+++ b/src/saim/culture_link/private/cached_session.py
@@ -12,15 +12,20 @@
     Mapping,
     ParamSpec,
     final,
-    override,
 )
 import warnings
 from requests import PreparedRequest, Timeout
 from requests.structures import CaseInsensitiveDict
-from requests.adapters import HTTPAdapter, BaseAdapter
-from requests_cache import AnyResponse, BaseCache, CachedSession
-from playwright.async_api import (Response, async_playwright, Error,
-                                  BrowserContext, Playwright, Page)
+from requests.adapters import BaseAdapter
+from requests_cache import BaseCache, CachedSession
+from playwright.async_api import (
+    Response,
+    async_playwright,
+    Error,
+    BrowserContext,
+    Playwright,
+    Page,
+)
 from urllib3 import HTTPResponse
 from requests.models import Response as RequestResponse
 from requests.exceptions import RequestException
@@ -41,9 +46,7 @@ async def _get_resp(
         resp = await call()
     except Error as err:
         warnings.warn(
-            f"{retry!s} - {err!s} - {resp!s} - {err_str}",
-            RequestWarn,
-            stacklevel=0
+            f"{retry!s} - {err!s} - {resp!s} - {err_str}", RequestWarn, stacklevel=0
         )
     return resp
 
@@ -136,18 +139,6 @@ def close(self, last: bool) -> None:
             runner.close()
 
 
-@final
-class SimpleHTTPAdapter(HTTPAdapter):
-
-    @override
-    def close(self) -> None:
-        pass
-
-    def finish(self) -> None:
-        print("CLOSING HTTP")
-        super(HTTPAdapter, self).close()
-
-
 BLOCK_TYPES: Final[list[str]] = [
     "image",
     "media",
@@ -163,19 +154,23 @@ class BrowserPWAdapter(BaseAdapter):
     __slots__: tuple[str, ...] = (
         "__browser",
         "__contact",
+        "__cool_down",
+        "__delay",
         "__pwc",
         "__retries",
         "__runner",
-        "__tmp"
+        "__tmp",
     )
 
     def __init__(
-        self, pwc: PWContext, contact: str = "", max_retries: int = 0, /
+        self, pwc: PWContext, contact: str = "", max_attempts: int = 1, /
     ) -> None:
-        self.__retries = max_retries if max_retries > 1 else 1
         self.__pwc: PWContext = pwc
         self.__contact = contact
         self.__tmp = tempfile.TemporaryDirectory()
+        self.__cool_down: CoolDownDomain | None = None
+        self.__delay = 1.0
+        self.__retries = max_attempts if max_attempts > 1 else 1
         if not self.__pwc.is_test:
             ctx = self.__pwc.runner.run(self.__pwc.ctx)
             self.__browser: BrowserContext | None = self.__pwc.runner.run(
@@ -192,6 +187,16 @@ def __init__(
             self.__browser = None
         super().__init__()
 
+    def set_cool_down(self, cool_down: CoolDownDomain, delay: float, /) -> None:
+        self.__cool_down = cool_down
+        self.__delay = delay
+
+    async def __await_cool_down(self) -> None:
+        if self.__cool_down is None:
+            await asyncio.sleep(1.0)
+        else:
+            self.__cool_down.await_cool_down(self.__delay)
+
     async def __send(
         self,
         url: str,
@@ -211,7 +216,8 @@ async def __send(
             "**/*",
             lambda route, req: (
                 route.abort()
-                if req.resource_type in BLOCK_TYPES else route.continue_()
+                if req.resource_type in BLOCK_TYPES
+                else route.continue_()
             ),
         )
         page.on("console", lambda _: None)
@@ -219,10 +225,12 @@
             {"User-Agent": get_user_agent(self.__contact)}
         )
         att_time = tout_msec * (0.5 if attempt > 0 else 1.0)
+
         async def go_to_page(p: Page = page, t: float = att_time) -> Response | None:
             return await p.goto(url, timeout=t, wait_until="load")
 
-        resp: Response | None = await _get_resp(go_to_page, err_str, attempt +1)
+        await self.__await_cool_down()
+        resp: Response | None = await _get_resp(go_to_page, err_str, attempt + 1)
         if resp is not None:
             start_time = time.time()
             try:
@@ -239,7 +247,7 @@ async def go_to_page(p: Page = page, t: float = att_time) -> Response | None:
                     return _create_response(request, resp, content)
             await page.close()
             if attempt + 1 < self.__retries:
-                await asyncio.sleep(1.0 + (random.random() - 0.5)) # noqa: S311
+                await asyncio.sleep(1.0 + (random.random() - 0.5))  # noqa: S311
         return None
 
     def send(
@@ -272,18 +280,11 @@ def finish(self) -> None:
         self.__tmp.cleanup()
 
 
-def _mount_adapters(
-    adapter_pw: BrowserPWAdapter | SimpleHTTPAdapter, session: CachedSession, /
-) -> None:
-    session.mount("http://", adapter_pw)
-    session.mount("https://", adapter_pw)
-
-
 P = ParamSpec("P")
 
 
-def create_get_cache(
-    adapter: BrowserPWAdapter | SimpleHTTPAdapter,
+def _create_get_cache(
+    adapter: BrowserPWAdapter,
     exp_days: int,
     backend: BaseCache,
     key_fn: Callable[Concatenate[PreparedRequest, P], str],
@@ -297,83 +298,59 @@ def create_get_cache(
             stale_if_error=False,
             always_revalidate=False,
             allowable_codes=[*range(200, 400), 404, 403],
-            allowable_methods=(
-                "GET",
-                "HEAD",
-            ),
+            allowable_methods=("GET",),
             key_fn=key_fn,
         )
-        _mount_adapters(adapter, session)
+        session.mount("http://", adapter)
+        session.mount("https://", adapter)
     except Error as cex:
         raise SessionCreationEx(f"{cex!s}") from cex
     return session
 
 
-def run_request(browser: bool, session: CachedSession, /) -> Callable[..., AnyResponse]:
-    if browser:
-        return session.get
-    return session.head
-
-
-def _browser_fallback_wrap(
-    browser: bool,
-    pw_adapter: BrowserPWAdapter,
-    session: CachedSession,
-    url: str,
-    contact: str,
-    /,
-) -> AnyResponse:
-    params = {
-        "timeout": 180,
-        "allow_redirects": True,
-        "headers": {"User-Agent": get_user_agent(contact)},
-    }
-    try:
-        response = run_request(browser, session)(url, **params)
-    except (Error, RequestException):
-        if not browser:
-            _mount_adapters(pw_adapter, session)
-            response = session.get(url, **params)
-        else:
-            raise
-    else:
-        if 400 <= response.status_code < 500 and not browser:
-            response = session.get(url, **params)
-    return response
-
-
 def make_get_request(
-    browser: bool,
-    pw_adapter: BrowserPWAdapter,
     url: str,
-    session: CachedSession,
-    domain_info: tuple[CoolDownDomain, RobotsTxt],
-    contact: str,
+    session: tuple[
+        BrowserPWAdapter,
+        int,
+        BaseCache,
+        Callable[Concatenate[PreparedRequest, P], str],
+    ],
+    info: tuple[CoolDownDomain, RobotsTxt, str],
+    tasks_cnt: int,
     /,
 ) -> CachedPageResp:
     results = CachedPageResp(prohibited=True)
-    cool_down, robots_txt = domain_info
+    cool_down, robots_txt, contact = info
+    pw_adapter, exp, cache, call = session
+
+    pw_adapter.set_cool_down(cool_down, robots_txt.get_delay())
+    cached_session = _create_get_cache(
+        pw_adapter,
+        exp,
+        cache,
+        call,
+    )
 
-    def _callback(last_request: float, /) -> tuple[float, bool]:
-        if last_request < 0:
-            return last_request, True
-        nonlocal results
-        request_time = time.time()
+    if robots_txt.can_fetch(url):
+        if cool_down.skip_request():
+            return results
         try:
-            response = _browser_fallback_wrap(browser, pw_adapter, session, url, contact)
-            if response.from_cache:
-                request_time = last_request
+            response = cached_session.get(
+                url,
+                **{
+                    "timeout": 180,
+                    "allow_redirects": True,
+                    "headers": {"User-Agent": get_user_agent(contact)},
+                },
+            )
         except (Error, RequestException):
-            results = CachedPageResp(timeout=True)
-            return request_time, True
+            cool_down.finished_request(True, tasks_cnt)
+            return CachedPageResp(timeout=True)
         results = CachedPageResp(
             response=b"" if response.content is None else response.content,
             status=response.status_code,
             cached=response.from_cache,
         )
-        return request_time, False
-
-    if robots_txt.can_fetch(url):
-        delay = robots_txt.get_delay()
-        cool_down.call_after_cool_down(delay, _callback)
+        cool_down.finished_request(results.timeout, tasks_cnt)
     return results
diff --git a/src/saim/culture_link/private/container.py b/src/saim/culture_link/private/container.py
index c1521fd..eb5ba9e 100644
--- a/src/saim/culture_link/private/container.py
+++ b/src/saim/culture_link/private/container.py
@@ -56,31 +56,32 @@ class TaskPackage:
     template_links: CatalogueLink
     fallback_link: str = ""
 
+    @property
+    def urls(self) -> list[tuple[str, str, str, int]]:
+        return [
+            *[
+                (
+                    LinkLevel.cat.value,
+                    *self._pack_catalogue(cat),
+                )
+                for cat in self.template_links.catalogue
+            ],
+            ("fallback", *self._pack_catalogue(self.fallback_link)),
+            (
+                LinkLevel.home.value,
+                self.template_links.homepage,
+                str(CacheNames.hom.value),
+                HOME_EXP_DAYS,
+            ),
+        ]
+
     def _pack_catalogue(self, link: str, /) -> tuple[str, str, int]:
         if len(self.search_task.find_extra) == 0:
             return (link, str(CacheNames.cat.value), CAT_EXP_DAYS)
         return (link, str(CacheNames.cat_det.value), CAT_DET_EXP_DAYS)
 
     def __iter__(self) -> Iterator[tuple[str, str, str, int]]:
-        return iter(
-            task
-            for task in [
-                *[
-                    (
-                        LinkLevel.cat.value,
-                        *self._pack_catalogue(cat),
-                    )
-                    for cat in self.template_links.catalogue
-                ],
-                ("fallback", *self._pack_catalogue(self.fallback_link)),
-                (
-                    LinkLevel.home.value,
-                    self.template_links.homepage,
-                    str(CacheNames.hom.value),
-                    HOME_EXP_DAYS,
-                ),
-            ]
-        )
+        return iter(task for task in self.urls)
 
 
 @final
diff --git a/src/saim/culture_link/private/cool_down.py b/src/saim/culture_link/private/cool_down.py
index d34b169..987e9f2 100644
--- a/src/saim/culture_link/private/cool_down.py
+++ b/src/saim/culture_link/private/cool_down.py
@@ -1,14 +1,14 @@
 from multiprocessing.context import SpawnContext
 import time
-from typing import Callable, Final, final
+from typing import Final, final
 import warnings
 
 from saim.shared.error.warnings import RequestWarn
 
 
-_COOL_DOWN: Final[float] = 1.0
+_COOL_DOWN: Final[float] = 3.0
 _T_RESET: Final[int] = 259200
-_T_LIMIT: Final[int] = 3
+_T_LIMIT: Final[float] = 3.0
 _MAX_DELAY: Final[int] = 5
 
 
@@ -18,53 +18,50 @@ class CoolDownDomain:
 
     def __init__(self, mpc: SpawnContext, domain: str, /) -> None:
         manager = mpc.Manager()
-        self.__timeout_cnt = manager.Value("i", 0)
-        self.__last_request = manager.Value("d", time.time() - _COOL_DOWN)
+        self.__timeout_cnt = manager.Value("d", 0.0)
+        last_req = time.time() - _COOL_DOWN
+        if last_req < 0:
+            last_req = time.time()
+        self.__last_request = manager.Value("d", last_req)
         self.__lock = manager.Lock()
         self.__domain = domain
         super().__init__()
 
-    def call_after_cool_down(
-        self, delay: float, callback: Callable[[float], tuple[float, bool]], /
-    ) -> None:
+    def await_cool_down(self, delay: float, /) -> None:
+        wait_time = 0.0
+        cool_down_sec = delay if 0 < delay < _MAX_DELAY else _COOL_DOWN
+
+        while True:
+            with self.__lock:
+                now = time.time()
+                next_allowed = self.__last_request.value + cool_down_sec
+                wait_time = max(0, next_allowed - now)
+
+                if wait_time == 0:
+                    self.__last_request.value = now
+                    break
+            time.sleep(wait_time + 0.01)
+
+    def skip_request(self) -> bool:
         with self.__lock:
             last_req = self.__last_request.value
-            cool_down_sec = delay if 0 < delay < _MAX_DELAY else _COOL_DOWN
-            time_dif = time.time() - last_req
-            time_out_cnt = self.__timeout_cnt.value
-            if time_out_cnt >= _T_LIMIT and time_dif < _T_RESET:
-                last_req = -1
-            wait_time = cool_down_sec - time.time() + last_req
-            new_req = last_req
-            if wait_time > 0:
-                new_req = last_req + wait_time
-            self.__last_request.value = new_req
-        if wait_time > 0:
-            time.sleep(wait_time)
-        if delay >= _MAX_DELAY:
-            warnings.warn(
-                f"[DELAY] High delay requirement detected - {self.__domain}",
-                RequestWarn,
-                stacklevel=2,
-            )
-        request_time, timeout = callback(last_req)
+            timeout_cnt = self.__timeout_cnt.value
+            if timeout_cnt < _T_LIMIT:
+                return False
+            if (time.time() - last_req) >= _T_RESET:
+                self.__timeout_cnt.value = 0
+                return False
+            return True
+
+    def finished_request(self, timeout: bool, tasks_cnt: int, /) -> None:
         with self.__lock:
-            if self.__last_request.value == new_req:
-                self.__last_request.value = request_time
-            if timeout:
-                cur_add = 0 if time_out_cnt >= _T_LIMIT else 1
-                self.__timeout_cnt.value += cur_add
-                info = "skipped" if last_req == -1 else "called"
+            timeout_cnt = self.__timeout_cnt.value
+            if not timeout and timeout_cnt > 0:
+                self.__timeout_cnt.value = 0.0
+            if timeout and timeout_cnt < _T_LIMIT:
+                self.__timeout_cnt.value += 1.0 / tasks_cnt
                 warnings.warn(
-                    f"[TIMEOUT] {self.__domain} [{time_out_cnt} - {cur_add}] - {info}",
+                    f"[TIMEOUT] {self.__domain} [{self.__timeout_cnt.value}]",
                     RequestWarn,
-                    stacklevel=2,
+                    stacklevel=1,
                 )
-            else:
-                if time_out_cnt > 0:
-                    warnings.warn(
-                        f"[TIMEOUT] {self.__domain} timeout reset",
-                        RequestWarn,
-                        stacklevel=2,
-                    )
-                self.__timeout_cnt.value = 0
diff --git a/src/saim/culture_link/private/verify_ccno.py b/src/saim/culture_link/private/verify_ccno.py
index ca7dfe4..b7f9843 100644
--- a/src/saim/culture_link/private/verify_ccno.py
+++ b/src/saim/culture_link/private/verify_ccno.py
@@ -17,12 +17,9 @@
     create_key,
     yaml_serializer,
 )
-from urllib3 import Retry
 from saim.culture_link.private.cached_session import (
     BrowserPWAdapter,
     PWContext,
-    SimpleHTTPAdapter,
-    create_get_cache,
     make_get_request,
 )
 from saim.culture_link.private.constants import CacheNames, VerificationStatus
@@ -67,16 +64,13 @@ def _wrap_status(
 _REQ: TypeAlias = dict[str, tuple[CoolDownDomain, RobotsTxt]]
 _ARGS_T: TypeAlias = tuple[TaskPackage, _REQ]
-_ARGS_ST: TypeAlias = tuple[
-    TaskPackage, _REQ, int, Path, BrowserPWAdapter | None, SimpleHTTPAdapter | None, str
-]
+_ARGS_ST: TypeAlias = tuple[TaskPackage, _REQ, int, Path, BrowserPWAdapter | None, str]
 
 _WSP: Final[Pattern[str]] = re.compile(r"\s+")
 
 
 @final
 @dataclass(frozen=True, slots=True)
 class SessionSettings:
-    req_adapter: SimpleHTTPAdapter
     pw_adapter: BrowserPWAdapter
     url: str
     name: str
@@ -200,6 +194,7 @@ def _get_result(
     settings: SessionSettings,
     domain: tuple[CoolDownDomain, RobotsTxt],
     sea_task: SearchTask,
+    tasks_cnt: int,
     /,
 ) -> tuple[CachedPageResp, LinkResult | None]:
     skip_search = settings.name == str(CacheNames.hom.value)
@@ -228,19 +223,15 @@ def wrap_key_f(request: PreparedRequest, **kwargs: Any) -> str:
         name="yaml_slim",
         is_binary=False,
     )
-    browser = settings.name in [str(CacheNames.cat.value), str(CacheNames.cat_det.value)]
-    main_adapter: BrowserPWAdapter | SimpleHTTPAdapter = settings.req_adapter
-    if browser:
-        main_adapter = settings.pw_adapter
     backend = create_sqlite_backend(
         f"verify_ccno_{settings.name}", settings.work_dir, custom_ser_p
     )(settings.db_size_gb, settings.exp_days)
-    with create_get_cache(
-        main_adapter, settings.exp_days, backend, wrap_key_f
-    ) as session:
-        resp = make_get_request(
-            browser, settings.pw_adapter, settings.url, session, domain, settings.contact
-        )
+    resp = make_get_request(
+        settings.url,
+        (settings.pw_adapter, settings.exp_days, backend, wrap_key_f),
+        (*domain, settings.contact),
+        tasks_cnt,
+    )
     if not resp.cached and closure:
         resp = CachedPageResp.change_to_cached_content(resp, buffered)
     if resp.cached:
@@ -248,20 +239,6 @@ def wrap_key_f(request: PreparedRequest, **kwargs: Any) -> str:
     return resp, _prepare_result_raw(settings.url, resp, sea_task, skip_search)
 
 
-def _create_req_adapter(adapter: SimpleHTTPAdapter | None, /) -> SimpleHTTPAdapter:
-    if adapter is not None:
-        return adapter
-    return SimpleHTTPAdapter(
-        max_retries=Retry(
-            status=3,
-            backoff_factor=0.2,
-            backoff_max=10,
-            respect_retry_after_header=False,
-            status_forcelist=[500, 502, 503, 504],
-        )
-    )
-
-
 def _create_pw_adapter(
     adapter: BrowserPWAdapter | None, contact: str, /
 ) -> BrowserPWAdapter:
@@ -271,7 +248,7 @@ def _create_pw_adapter(
 
 
 def verify_ccno_in_url(args: _ARGS_ST, /) -> VerifiedURL:
-    task, cool_down, size, folder, pwa, rea, contact = args
+    task, cool_down, size, folder, pwa, contact = args
     status = []
     try:
         for url_typ, url, name, exp in task:
@@ -280,7 +257,6 @@
                 continue
             resp, ana_result = _get_result(
                 SessionSettings(
-                    _create_req_adapter(rea),
                     _create_pw_adapter(pwa, contact),
                     url,
                     name,
@@ -291,6 +267,7 @@ def verify_ccno_in_url(args: _ARGS_ST, /) -> VerifiedURL:
                 ),
                 domain,
                 task.search_task,
+                len(task.urls),
             )
             status.append(
                 LinkStatus(
@@ -323,14 +300,12 @@ def value(self) -> bool: ...
 
 @final
 class VerifyCcNosProc:
-
     __slots__: tuple[str, ...] = (
         "__contact",
         "__finish",
         "__folder",
         "__pw_adapter",
         "__read",
-        "__req_adapter",
         "__size",
         "__write",
     )
@@ -351,7 +326,6 @@ def __init__(
         self.__folder = folder
         self.__finish: ValueP = finish
         self.__contact = contact
-        self.__req_adapter: SimpleHTTPAdapter | None = None
         self.__pw_adapter: BrowserPWAdapter | None = None
         atexit.register(lambda: self.close())
         super().__init__()
@@ -361,11 +335,6 @@ def _pw_adapter(self) -> BrowserPWAdapter:
         self.__pw_adapter = _create_pw_adapter(self.__pw_adapter, self.__contact)
         return self.__pw_adapter
 
-    @property
-    def _req_adapter(self) -> SimpleHTTPAdapter:
-        self.__req_adapter = _create_req_adapter(self.__req_adapter)
-        return self.__req_adapter
-
     def __verify_ccno_in_url(self, args: _ARGS_T, /) -> VerifiedURL:
         task, req = args
         return verify_ccno_in_url(
@@ -375,7 +344,6 @@ def __verify_ccno_in_url(self, args: _ARGS_T, /) -> VerifiedURL:
                 self.__size,
                 self.__folder,
                 self._pw_adapter,
-                self._req_adapter,
                 self.__contact,
             )
         )
@@ -392,9 +360,6 @@ def run(self) -> None:
             self.__write.put(result)
 
     def close(self) -> None:
-        if self.__req_adapter is not None:
-            self.__req_adapter.finish()
-            self.__req_adapter = None
         if self.__pw_adapter is not None:
             self.__pw_adapter.finish()
             self.__pw_adapter = None
diff --git a/src/saim/shared/cafi/adapter.py b/src/saim/shared/cafi/adapter.py
index 9a6ca23..c644ae6 100644
--- a/src/saim/shared/cafi/adapter.py
+++ b/src/saim/shared/cafi/adapter.py
@@ -16,7 +16,7 @@ def parse_ccno_to_cat_args(ccno: CCNoDes, /) -> CatArgs:
 
 
 def get_domain_from_cafi(links: CatalogueLink, fallback: str, /) -> str:
-    match (links.level):
+    match links.level:
         case LinkLevel.cat if len(links.catalogue) > 0:
             return get_domain(links.catalogue[0])
         case LinkLevel.home if fallback == "":
diff --git a/src/saim/shared/misc/constants.py b/src/saim/shared/misc/constants.py
index 915f3b9..4382c23 100644
--- a/src/saim/shared/misc/constants.py
+++ b/src/saim/shared/misc/constants.py
@@ -2,4 +2,4 @@
 
 ENCODING: Final[str] = "utf-8"
 
-VERSION: Final[str] = "v0.9.28"
+VERSION: Final[str] = "v0.9.29"
diff --git a/src/saim/shared/search/radix_tree.py b/src/saim/shared/search/radix_tree.py
index 617328f..a8f4346 100644
--- a/src/saim/shared/search/radix_tree.py
+++ b/src/saim/shared/search/radix_tree.py
@@ -131,7 +131,6 @@ def radix_get_next[T](radix: RadixTree[T], ind: str, /) -> RadixTree[T] | None:
 def _append_2_tuple_iter[T](
     data: tuple[_RQP[T], ...], to_add_k: str, to_add_v: str, index: tuple[T, ...], /
 ) -> Iterable[_RQP[T]]:
-
     append = True
     for ite in data:
         if ite[0] == to_add_k:
diff --git a/src/saim/taxon_name/manager.py b/src/saim/taxon_name/manager.py
index bcc70d2..e611fb8 100644
--- a/src/saim/taxon_name/manager.py
+++ b/src/saim/taxon_name/manager.py
@@ -102,7 +102,6 @@ def _keep_ids(ncbi: int, lpsn: int, id_con: _IdP, /) -> bool:
 
 @final
 class TaxonManager:
-
     __slots__ = (
         "__gbif",
         "__jump",
@@ -135,7 +134,6 @@ def __new__(cls, *_args: Path | str) -> Self:
     def __create_session(
         self, cnf: LPSNConf, /
     ) -> tuple[GbifTaxReq, NcbiTaxReq, LpsnTaxReq]:
-
         try:
             return (
                 GbifTaxReq(self.__wir, self._exp_days),
diff --git a/src/saim/taxon_name/private/lpsn.py b/src/saim/taxon_name/private/lpsn.py
index dd9ac00..f53fa3e 100644
--- a/src/saim/taxon_name/private/lpsn.py
+++ b/src/saim/taxon_name/private/lpsn.py
@@ -33,16 +33,14 @@ def _create_header(lpsn_cred: JWTCred, /) -> dict[str, str]:
     }
 
 
-def _request_next[
-    RT: (LPSNName, LPSNId)
-](
+def _request_next[RT: (LPSNName, LPSNId)](
     req_res: RT,
     lpsn_cred: JWTCred,
     session: CachedSession,
     cont: type[RT],
     cnt: int = 1,
     /,
-) -> (tuple[RT, bool] | None):
+) -> tuple[RT, bool] | None:
     if req_res.next is None or req_res.next == "" or cnt > 3:
         return None
     err_401 = False
diff --git a/tests/fixture/links.py b/tests/fixture/links.py
index b6bf3dd..7c75b13 100644
--- a/tests/fixture/links.py
+++ b/tests/fixture/links.py
@@ -69,4 +69,4 @@ def task_pack(search_task: SearchTask) -> TaskPackage:
 
 @pytest.fixture(scope="session")
 def browser_adapter() -> BrowserPWAdapter:
-    return BrowserPWAdapter(PWContext(2, True), "", 3)
+    return BrowserPWAdapter(PWContext(2, True))
diff --git a/tests/unit/culture_links/test_cool_down.py b/tests/unit/culture_links/test_cool_down.py
index 5c0b6ed..2ae50b8 100644
--- a/tests/unit/culture_links/test_cool_down.py
+++ b/tests/unit/culture_links/test_cool_down.py
@@ -1,22 +1,7 @@
-import pytest
 from saim.culture_link.private.cool_down import CoolDownDomain
-from saim.shared.error.warnings import RequestWarn
 
 pytest_plugins = ("tests.fixture.links",)
 
 
 def test_cool_down(cool_down: CoolDownDomain) -> None:
     assert cool_down is not None
-
-
-def mock_function(x: float) -> tuple[float, bool]:
-    if x > 0.5:
-        return (0, True)
-    return (0, False)
-
-
-def test_call_after_cool_down(cool_down: CoolDownDomain) -> None:
-    with pytest.warns(RequestWarn, match=r"^\[TIMEOUT\] .+ \[0 - 1\] - called$"):
-        cool_down.call_after_cool_down(2, mock_function)
-    with pytest.warns(RequestWarn, match=r"^\[TIMEOUT\] .+ timeout reset$"):
-        cool_down.call_after_cool_down(0.1, mock_function)
diff --git a/tests/unit/culture_links/test_verify_ccno.py b/tests/unit/culture_links/test_verify_ccno.py
index d9e7961..d199cab 100644
--- a/tests/unit/culture_links/test_verify_ccno.py
+++ b/tests/unit/culture_links/test_verify_ccno.py
@@ -93,7 +93,6 @@ def test_prepare_result_raw_suc(
 def test_prepare_result_raw_fail(
     search_task: SearchTask, cached_resp_fail: CachedPageResp
 ) -> None:
-
     assert _prepare_result_raw("somelink", cached_resp_fail, search_task, False) is None
 
 
@@ -101,7 +100,6 @@ def test_prepare_result_raw_fail(
 def test_prepare_result_raw_fail_emp(
     search_task: SearchTask, cached_resp_suc_emp: CachedPageResp
 ) -> None:
-
     assert (
         _prepare_result_raw("somelink", cached_resp_suc_emp, search_task, False) is None
     )
@@ -124,7 +122,6 @@ def _cr_args_test_verify_ccno_in_url(
     int,
     Path,
     BrowserPWAdapter,
-    None,
     str,
 ]:
     cool = CoolDownDomain(get_worker_ctx(), "test.test")
@@ -148,12 +145,12 @@ def _cr_args_test_verify_ccno_in_url(
         timeout=False,
         prohibited=False,
     )
-    return (task_pack, c_down, 20, workdir, browser_adapter, None, "")
+    return (task_pack, c_down, 20, workdir, browser_adapter, "")
 
 
 @pytest.mark.filterwarnings("ignore:.* http.*")
 @unittest.mock.patch("saim.culture_link.private.verify_ccno.make_get_request")
-@unittest.mock.patch("saim.culture_link.private.verify_ccno.create_get_cache")
+@unittest.mock.patch("saim.culture_link.private.cached_session._create_get_cache")
 @unittest.mock.patch("saim.culture_link.private.verify_ccno.yaml_serializer")
 def test_verify_ccno_in_url_ok(
     mock_serializer: Mock,
@@ -188,7 +185,7 @@ def test_verify_ccno_in_url_ok(
 
 @pytest.mark.filterwarnings("ignore:.* http.*")
 @unittest.mock.patch("saim.culture_link.private.verify_ccno.make_get_request")
-@unittest.mock.patch("saim.culture_link.private.verify_ccno.create_get_cache")
+@unittest.mock.patch("saim.culture_link.private.cached_session._create_get_cache")
 @unittest.mock.patch("saim.culture_link.private.verify_ccno.yaml_serializer")
 def test_verify_ccno_in_url_fail(
     mock_serializer: Mock,
@@ -232,7 +229,7 @@ def test_verify_ccno_in_url_fail(
 
 @pytest.mark.filterwarnings("ignore:.* http.*")
 @unittest.mock.patch("saim.culture_link.private.verify_ccno.make_get_request")
-@unittest.mock.patch("saim.culture_link.private.verify_ccno.create_get_cache")
+@unittest.mock.patch("saim.culture_link.private.cached_session._create_get_cache")
 @unittest.mock.patch("saim.culture_link.private.verify_ccno.yaml_serializer")
 def test_verify_ccno_in_url_fatal(
     mock_serializer: Mock,
diff --git a/tests/unit/designation/test_sample.py b/tests/unit/designation/test_sample.py
index c1a65e4..53f9e52 100644
--- a/tests/unit/designation/test_sample.py
+++ b/tests/unit/designation/test_sample.py
@@ -15,7 +15,6 @@
 
 
 class TestSample:
-
     def test_valid_ccno_des(self, brc_simple: BrcContainer) -> None:
         valid_ccnos = [
             "DSM3",
diff --git a/uv.lock b/uv.lock
index e8a70d8..a55f1e7 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1231,7 +1231,7 @@ wheels = [
 
 [[package]]
 name = "saim"
-version = "0.9.28"
+version = "0.9.29"
 source = { editable = "." }
 dependencies = [
     { name = "cafi" },