Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Use hatch fmt command #1424

Merged
merged 15 commits into from
May 28, 2024
2 changes: 1 addition & 1 deletion .github/workflows/python-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ jobs:
- name: Run Linters
run: |
hatch -v run typing:test
hatch -v run lint:build
hatch fmt
pipx run interrogate -v .
pipx run doc8 --max-line-length=200 --ignore-path=docs/source/other/full-config.rst
npm install -g eslint
Expand Down
2 changes: 1 addition & 1 deletion docs/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@

# General information about the project.
project = "Jupyter Server"
copyright = "2020, Jupyter Team, https://jupyter.org"
copyright = "2020, Jupyter Team, https://jupyter.org" # noqa: A001
author = "The Jupyter Team"

# ghissue config
Expand Down
4 changes: 2 additions & 2 deletions examples/simple/tests/test_handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import pytest


@pytest.fixture()
@pytest.fixture
def jp_server_auth_resources(jp_server_auth_core_resources):
"""The server auth resources."""
for url_regex in [
Expand All @@ -13,7 +13,7 @@ def jp_server_auth_resources(jp_server_auth_core_resources):
return jp_server_auth_core_resources


@pytest.fixture()
@pytest.fixture
def jp_server_config(jp_template_dir, jp_server_authorizer):
"""The server config."""
return {
Expand Down
2 changes: 1 addition & 1 deletion jupyter_server/_sysinfo.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ def pkg_commit_hash(pkg_path):
if p.exists(p.join(cur_path, ".git")):
try:
proc = subprocess.Popen(
["git", "rev-parse", "--short", "HEAD"],
["git", "rev-parse", "--short", "HEAD"], # noqa: S607
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=pkg_path,
Expand Down
2 changes: 1 addition & 1 deletion jupyter_server/_tz.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
ZERO = timedelta(0)


class tzUTC(tzinfo):
class tzUTC(tzinfo): # noqa: N801
"""tzinfo object for UTC (zero offset)"""

def utcoffset(self, d: datetime | None) -> timedelta:
Expand Down
2 changes: 1 addition & 1 deletion jupyter_server/auth/authorizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def is_authorized(
bool
True if user authorized to make request; False, otherwise
"""
raise NotImplementedError()
raise NotImplementedError


class AllowAllAuthorizer(Authorizer):
Expand Down
4 changes: 2 additions & 2 deletions jupyter_server/auth/security.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,13 +68,13 @@ def passwd(passphrase=None, algorithm="argon2"):
)
h_ph = ph.hash(passphrase)

return ":".join((algorithm, h_ph))
return f"{algorithm}:{h_ph}"

h = hashlib.new(algorithm)
salt = ("%0" + str(salt_len) + "x") % random.getrandbits(4 * salt_len)
h.update(passphrase.encode("utf-8") + salt.encode("ascii"))

return ":".join((algorithm, salt, h.hexdigest()))
return f"{algorithm}:{salt}:{h.hexdigest()}"


def passwd_check(hashed_passphrase, passphrase):
Expand Down
2 changes: 1 addition & 1 deletion jupyter_server/auth/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -166,4 +166,4 @@ def get_anonymous_username() -> str:
Get a random user-name based on the moons of Jupyter.
This function returns names like "Anonymous Io" or "Anonymous Metis".
"""
return moons_of_jupyter[random.randint(0, len(moons_of_jupyter) - 1)]
return moons_of_jupyter[random.randint(0, len(moons_of_jupyter) - 1)] # noqa: S311
2 changes: 1 addition & 1 deletion jupyter_server/base/call_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def get(cls, name: str) -> Any:

if name in name_value_map:
return name_value_map[name]
return None # TODO - should this raise `LookupError` (or a custom error derived from said)
return None # TODO: should this raise `LookupError` (or a custom error derived from said)

@classmethod
def set(cls, name: str, value: Any) -> None:
Expand Down
4 changes: 2 additions & 2 deletions jupyter_server/base/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
import prometheus_client
from jinja2 import TemplateNotFound
from jupyter_core.paths import is_hidden
from jupyter_events import EventLogger
from tornado import web
from tornado.log import app_log
from traitlets.config import Application
Expand All @@ -45,6 +44,7 @@

if TYPE_CHECKING:
from jupyter_client.kernelspec import KernelSpecManager
from jupyter_events import EventLogger
from jupyter_server_terminals.terminalmanager import TerminalManager
from tornado.concurrent import Future

Expand Down Expand Up @@ -785,7 +785,7 @@ def get_login_url(self) -> str:

@property
def content_security_policy(self) -> str:
csp = "; ".join(
csp = "; ".join( # noqa: FLY002
[
super().content_security_policy,
"default-src 'none'",
Expand Down
2 changes: 1 addition & 1 deletion jupyter_server/config_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ def get(self, section_name: str, include_root: bool = True) -> dict[str, t.Any]:
try:
recursive_update(data, json.load(f))
except json.decoder.JSONDecodeError:
self.log.warn("Invalid JSON in %s, skipping", path)
self.log.warning("Invalid JSON in %s, skipping", path)
return data

def set(self, section_name: str, data: t.Any) -> None:
Expand Down
2 changes: 1 addition & 1 deletion jupyter_server/gateway/connections.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ async def _read_messages(self):

# NOTE(esevan): if websocket is not disconnected by client, try to reconnect.
if not self.disconnected and self.retry < GatewayClient.instance().gateway_retry_max:
jitter = random.randint(10, 100) * 0.01
jitter = random.randint(10, 100) * 0.01 # noqa: S311
retry_interval = (
min(
GatewayClient.instance().gateway_retry_interval * (2**self.retry),
Expand Down
8 changes: 4 additions & 4 deletions jupyter_server/gateway/gateway_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -296,7 +296,7 @@ def _http_user_default(self):
help="""The password for HTTP authentication. (JUPYTER_GATEWAY_HTTP_PWD env var)
""",
)
http_pwd_env = "JUPYTER_GATEWAY_HTTP_PWD"
http_pwd_env = "JUPYTER_GATEWAY_HTTP_PWD" # noqa: S105

@default("http_pwd")
def _http_pwd_default(self):
Expand Down Expand Up @@ -347,7 +347,7 @@ def _auth_header_key_default(self):

(JUPYTER_GATEWAY_AUTH_TOKEN env var)""",
)
auth_token_env = "JUPYTER_GATEWAY_AUTH_TOKEN"
auth_token_env = "JUPYTER_GATEWAY_AUTH_TOKEN" # noqa: S105

@default("auth_token")
def _auth_token_default(self):
Expand Down Expand Up @@ -458,9 +458,9 @@ def _gateway_retry_max_default(self):
return int(os.environ.get(self.gateway_retry_max_env, self.gateway_retry_max_default_value))

gateway_token_renewer_class_default_value = (
"jupyter_server.gateway.gateway_client.NoOpTokenRenewer"
"jupyter_server.gateway.gateway_client.NoOpTokenRenewer" # noqa: S105
)
gateway_token_renewer_class_env = "JUPYTER_GATEWAY_TOKEN_RENEWER_CLASS"
gateway_token_renewer_class_env = "JUPYTER_GATEWAY_TOKEN_RENEWER_CLASS" # noqa: S105
gateway_token_renewer_class = Type(
klass=GatewayTokenRenewerBase,
config=True,
Expand Down
2 changes: 1 addition & 1 deletion jupyter_server/gateway/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -238,7 +238,7 @@ async def _read_messages(self, callback):

# NOTE(esevan): if websocket is not disconnected by client, try to reconnect.
if not self.disconnected and self.retry < GatewayClient.instance().gateway_retry_max:
jitter = random.randint(10, 100) * 0.01
jitter = random.randint(10, 100) * 0.01 # noqa: S311
retry_interval = (
min(
GatewayClient.instance().gateway_retry_interval * (2**self.retry),
Expand Down
10 changes: 6 additions & 4 deletions jupyter_server/gateway/managers.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,10 @@
import datetime
import json
import os
from logging import Logger
from queue import Empty, Queue
from threading import Thread
from time import monotonic
from typing import Any, Optional, cast
from typing import TYPE_CHECKING, Any, Optional, cast

import websocket
from jupyter_client.asynchronous.client import AsyncKernelClient
Expand All @@ -34,6 +33,9 @@
from ..utils import url_path_join
from .gateway_client import GatewayClient, gateway_request

if TYPE_CHECKING:
from logging import Logger


class GatewayMappingKernelManager(AsyncMappingKernelManager):
"""Kernel manager that supports remote kernels hosted by Jupyter Kernel or Enterprise Gateway."""
Expand Down Expand Up @@ -126,7 +128,7 @@ async def list_kernels(self, **kwargs):
# Remove any of our kernels that may have been culled on the gateway server
our_kernels = self._kernels.copy()
culled_ids = []
for kid, _ in our_kernels.items():
for kid in our_kernels:
if kid not in kernel_models:
# The upstream kernel was not reported in the list of kernels.
self.log.warning(
Expand Down Expand Up @@ -231,7 +233,7 @@ def _get_endpoint_for_user_filter(default_endpoint):
"""Get the endpoint for a user filter."""
kernel_user = os.environ.get("KERNEL_USERNAME")
if kernel_user:
return "?user=".join([default_endpoint, kernel_user])
return f"{default_endpoint}?user={kernel_user}"
return default_endpoint

def _replace_path_kernelspec_resources(self, kernel_specs):
Expand Down
6 changes: 3 additions & 3 deletions jupyter_server/nbconvert/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,15 +74,15 @@ def get_exporter(format, **kwargs):
raise web.HTTPError(500, "Could not import nbconvert: %s" % e) from e

try:
Exporter = get_exporter(format)
exporter = get_exporter(format)
except KeyError as e:
# should this be 400?
raise web.HTTPError(404, "No exporter for format: %s" % format) from e

try:
return Exporter(**kwargs)
return exporter(**kwargs)
except Exception as e:
app_log.exception("Could not construct Exporter: %s", Exporter)
app_log.exception("Could not construct Exporter: %s", exporter)
raise web.HTTPError(500, "Could not construct Exporter: %s" % e) from e


Expand Down
6 changes: 3 additions & 3 deletions jupyter_server/pytest_plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@
}


@pytest.fixture() # type:ignore[misc]
def jp_kernelspecs(jp_data_dir: Path) -> None: # noqa: PT004
@pytest.fixture # type:ignore[misc]
def jp_kernelspecs(jp_data_dir: Path) -> None:
"""Configures some sample kernelspecs in the Jupyter data directory."""
spec_names = ["sample", "sample2", "bad"]
for name in spec_names:
Expand All @@ -43,7 +43,7 @@ def jp_contents_manager(request, tmp_path):
return AsyncFileContentsManager(root_dir=str(tmp_path), use_atomic_writing=request.param)


@pytest.fixture
def jp_large_contents_manager(tmp_path):
    """Provide an AsyncLargeFileManager rooted at the test's temporary directory."""
    root_dir = str(tmp_path)
    return AsyncLargeFileManager(root_dir=root_dir)
12 changes: 6 additions & 6 deletions jupyter_server/serverapp.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@ def random_ports(port: int, n: int) -> t.Generator[int, None, None]:
for i in range(min(5, n)):
yield port + i
for _ in range(n - 5):
yield max(1, port + random.randint(-2 * n, 2 * n))
yield max(1, port + random.randint(-2 * n, 2 * n)) # noqa: S311


def load_handlers(name: str) -> t.Any:
Expand Down Expand Up @@ -372,7 +372,7 @@ def init_settings(
jenv_opt: dict[str, t.Any] = {"autoescape": True}
jenv_opt.update(jinja_env_options if jinja_env_options else {})

env = Environment(
env = Environment( # noqa: S701
loader=FileSystemLoader(template_path), extensions=["jinja2.ext.i18n"], **jenv_opt
)
sys_info = get_sys_info()
Expand Down Expand Up @@ -1210,9 +1210,9 @@ def _default_min_open_files_limit(self) -> t.Optional[int]:

soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)

DEFAULT_SOFT = 4096
if hard >= DEFAULT_SOFT:
return DEFAULT_SOFT
default_soft = 4096
if hard >= default_soft:
return default_soft

self.log.debug(
"Default value for min_open_files_limit is ignored (hard=%r, soft=%r)",
Expand Down Expand Up @@ -2315,7 +2315,7 @@ def _get_urlparts(
if not self.ip:
ip = "localhost"
# Handle nonexplicit hostname.
elif self.ip in ("0.0.0.0", "::"):
elif self.ip in ("0.0.0.0", "::"): # noqa: S104
ip = "%s" % socket.gethostname()
else:
ip = f"[{self.ip}]" if ":" in self.ip else self.ip
Expand Down
25 changes: 14 additions & 11 deletions jupyter_server/services/contents/filemanager.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
# Distributed under the terms of the Modified BSD License.
from __future__ import annotations

import asyncio
import errno
import math
import mimetypes
Expand Down Expand Up @@ -705,11 +706,13 @@ def _get_dir_size(self, path="."):
if platform.system() == "Darwin":
# returns the size of the folder in KB
result = subprocess.run(
["du", "-sk", path], capture_output=True, check=True
["du", "-sk", path], # noqa: S607
capture_output=True,
check=True,
).stdout.split()
else:
result = subprocess.run(
["du", "-s", "--block-size=1", path],
["du", "-s", "--block-size=1", path], # noqa: S607
capture_output=True,
check=True,
).stdout.split()
Expand Down Expand Up @@ -1185,18 +1188,18 @@ async def _get_dir_size(self, path: str = ".") -> str:
try:
if platform.system() == "Darwin":
# returns the size of the folder in KB
result = subprocess.run(
["du", "-sk", path], capture_output=True, check=True
).stdout.split()
args = ["-sk", path]
else:
result = subprocess.run(
["du", "-s", "--block-size=1", path],
capture_output=True,
check=True,
).stdout.split()
args = ["-s", "--block-size=1", path]
proc = await asyncio.create_subprocess_exec(
"du", *args, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)

stdout, _ = await proc.communicate()
result = await proc.wait()
self.log.info(f"current status of du command {result}")
size = result[0].decode("utf-8")
assert result == 0
size = stdout.decode("utf-8").split()[0]
except Exception:
self.log.warning(
"Not able to get the size of the %s directory. Copying might be slow if the directory is large!",
Expand Down
2 changes: 1 addition & 1 deletion jupyter_server/services/contents/largefilemanager.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,5 +151,5 @@ async def _save_large_file(self, os_path, content, format):
with self.perm_to_403(os_path):
if os.path.islink(os_path):
os_path = os.path.join(os.path.dirname(os_path), os.readlink(os_path))
with open(os_path, "ab") as f:
with open(os_path, "ab") as f: # noqa: ASYNC101
await run_sync(f.write, bcontent)
7 changes: 5 additions & 2 deletions jupyter_server/services/events/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,8 @@

import json
from datetime import datetime
from typing import Any, Dict, Optional, cast
from typing import TYPE_CHECKING, Any, Dict, Optional, cast

import jupyter_events.logger
from jupyter_core.utils import ensure_async
from tornado import web, websocket

Expand All @@ -21,6 +20,10 @@
AUTH_RESOURCE = "events"


if TYPE_CHECKING:
import jupyter_events.logger


class SubscribeWebsocket(
JupyterHandler,
websocket.WebSocketHandler,
Expand Down
Loading
Loading