Bump linter versions (#188)

The isort version we had pinned was hitting a weird Poetry packaging issue, so I figured I might as well
bump the other linters at the same time.

```
[INFO] Installing environment for https://github.com/PyCQA/isort.
[INFO] Once installed this environment will be reused.
[INFO] This may take a few minutes...
An unexpected error has occurred: CalledProcessError: command: ('/home/runner/.cache/pre-commit/repo0m3eczdf/py_env-python3.7/bin/python', '-mpip', 'install', '.')
return code: 1
stdout:
    Processing /home/runner/.cache/pre-commit/repo0m3eczdf
      Installing build dependencies: started
      Installing build dependencies: finished with status 'done'
      Getting requirements to build wheel: started
      Getting requirements to build wheel: finished with status 'done'
      Preparing metadata (pyproject.toml): started
      Preparing metadata (pyproject.toml): finished with status 'error'

stderr:
      error: subprocess-exited-with-error

      × Preparing metadata (pyproject.toml) did not run successfully.
      │ exit code: 1
      ╰─> [14 lines of output]
          Traceback (most recent call last):
            File "/home/runner/.cache/pre-commit/repo0m3eczdf/py_env-python3.7/lib/python3.7/site-packages/pip/_vendor/pyproject_hooks/_in_process/_in_process.py", line 353, in <module>
              main()
            File "/home/runner/.cache/pre-commit/repo0m3eczdf/py_env-python3.7/lib/python3.7/site-packages/pip/_vendor/pyproject_hooks/_in_process/_in_process.py", line 335, in main
              json_out['return_val'] = hook(**hook_input['kwargs'])
            File "/home/runner/.cache/pre-commit/repo0m3eczdf/py_env-python3.7/lib/python3.7/site-packages/pip/_vendor/pyproject_hooks/_in_process/_in_process.py", line 149, in prepare_metadata_for_build_wheel
              return hook(metadata_directory, config_settings)
            File "/tmp/pip-build-env-beaf5dxh/overlay/lib/python3.7/site-packages/poetry/core/masonry/api.py", line 40, in prepare_metadata_for_build_wheel
              poetry = Factory().create_poetry(Path(".").resolve(), with_groups=False)
            File "/tmp/pip-build-env-beaf5dxh/overlay/lib/python3.7/site-packages/poetry/core/factory.py", line 57, in create_poetry
              raise RuntimeError("The Poetry configuration is invalid:\n" + message)
          RuntimeError: The Poetry configuration is invalid:
            - [extras.pipfile_deprecated_finder.2] 'pip-shims<=0.3.4' does not match '^[a-zA-Z-_.0-9]+$'

          [end of output]

      note: This error originates from a subprocess, and is likely not a problem with pip.
    error: metadata-generation-failed

    × Encountered error while generating package metadata.
    ╰─> See above for output.

    note: This is an issue with the package mentioned above, not pip.
    hint: See above for details.
```
This commit is contained in:
Val Lorentz 2023-03-04 10:51:40 +01:00 committed by GitHub
parent 5364f963ae
commit 136a7923c0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 30 additions and 25 deletions

View File

@ -2,13 +2,13 @@ exclude: ^irctest/scram
repos: repos:
- repo: https://github.com/psf/black - repo: https://github.com/psf/black
rev: 22.3.0 rev: 23.1.0
hooks: hooks:
- id: black - id: black
language_version: python3 language_version: python3
- repo: https://github.com/PyCQA/isort - repo: https://github.com/PyCQA/isort
rev: 5.5.2 rev: 5.11.5
hooks: hooks:
- id: isort - id: isort
@ -18,6 +18,7 @@ repos:
- id: flake8 - id: flake8
- repo: https://github.com/pre-commit/mirrors-mypy - repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.812 rev: v1.0.1
hooks: hooks:
- id: mypy - id: mypy
additional_dependencies: [types-PyYAML, types-docutils]

View File

@ -173,7 +173,7 @@ class _IrcTestCase(Generic[TController]):
) -> Optional[str]: ) -> Optional[str]:
"""Returns an error message if the message doesn't match the given arguments, """Returns an error message if the message doesn't match the given arguments,
or None if it matches.""" or None if it matches."""
for (key, value) in kwargs.items(): for key, value in kwargs.items():
if getattr(msg, key) != value: if getattr(msg, key) != value:
fail_msg = ( fail_msg = (
fail_msg or "expected {param} to be {expects}, got {got}: {msg}" fail_msg or "expected {param} to be {expects}, got {got}: {msg}"
@ -351,8 +351,8 @@ class BaseClientTestCase(_IrcTestCase[basecontrollers.BaseClientController]):
nick: Optional[str] = None nick: Optional[str] = None
user: Optional[List[str]] = None user: Optional[List[str]] = None
server: socket.socket server: socket.socket
protocol_version = Optional[str] protocol_version: Optional[str]
acked_capabilities = Optional[Set[str]] acked_capabilities: Optional[Set[str]]
__new__ = object.__new__ # pytest won't collect Generic[] subclasses otherwise __new__ = object.__new__ # pytest won't collect Generic[] subclasses otherwise
@ -448,7 +448,9 @@ class BaseClientTestCase(_IrcTestCase[basecontrollers.BaseClientController]):
print("{:.3f} S: {}".format(time.time(), line.strip())) print("{:.3f} S: {}".format(time.time(), line.strip()))
def readCapLs( def readCapLs(
self, auth: Optional[Authentication] = None, tls_config: tls.TlsConfig = None self,
auth: Optional[Authentication] = None,
tls_config: Optional[tls.TlsConfig] = None,
) -> None: ) -> None:
(hostname, port) = self.server.getsockname() (hostname, port) = self.server.getsockname()
self.controller.run( self.controller.run(
@ -458,9 +460,9 @@ class BaseClientTestCase(_IrcTestCase[basecontrollers.BaseClientController]):
m = self.getMessage() m = self.getMessage()
self.assertEqual(m.command, "CAP", "First message is not CAP LS.") self.assertEqual(m.command, "CAP", "First message is not CAP LS.")
if m.params == ["LS"]: if m.params == ["LS"]:
self.protocol_version = 301 self.protocol_version = "301"
elif m.params == ["LS", "302"]: elif m.params == ["LS", "302"]:
self.protocol_version = 302 self.protocol_version = "302"
elif m.params == ["END"]: elif m.params == ["END"]:
self.protocol_version = None self.protocol_version = None
else: else:
@ -689,7 +691,7 @@ class BaseServerTestCase(
def connectClient( def connectClient(
self, self,
nick: str, nick: str,
name: TClientName = None, name: Optional[TClientName] = None,
capabilities: Optional[List[str]] = None, capabilities: Optional[List[str]] = None,
skip_if_cap_nak: bool = False, skip_if_cap_nak: bool = False,
show_io: Optional[bool] = None, show_io: Optional[bool] = None,
@ -734,8 +736,8 @@ class BaseServerTestCase(
self.server_support[param] = None self.server_support[param] = None
welcome.append(m) welcome.append(m)
self.targmax: Dict[str, Optional[str]] = dict( self.targmax: Dict[str, Optional[str]] = dict( # type: ignore[assignment]
item.split(":", 1) # type: ignore item.split(":", 1)
for item in (self.server_support.get("TARGMAX") or "").split(",") for item in (self.server_support.get("TARGMAX") or "").split(",")
if item if item
) )

View File

@ -39,7 +39,7 @@ class CaseResult:
type: Optional[str] = None type: Optional[str] = None
message: Optional[str] = None message: Optional[str] = None
def output_filename(self): def output_filename(self) -> str:
test_name = self.test_name test_name = self.test_name
if len(test_name) > 50 or set(test_name) & NETLIFY_CHAR_BLACKLIST: if len(test_name) > 50 or set(test_name) & NETLIFY_CHAR_BLACKLIST:
# File name too long or otherwise invalid. This should be good enough: # File name too long or otherwise invalid. This should be good enough:
@ -75,7 +75,7 @@ def iter_job_results(job_file_name: Path, job: ET.ElementTree) -> Iterator[CaseR
skipped = False skipped = False
details = None details = None
system_out = None system_out = None
extra = {} extra: Dict[str, str] = {}
for child in case: for child in case:
if child.tag == "skipped": if child.tag == "skipped":
success = True success = True
@ -187,7 +187,7 @@ def build_test_table(jobs: List[str], results: List[CaseResult]) -> ET.Element:
ET.SubElement(ET.SubElement(cell, "div"), "span").text = job ET.SubElement(ET.SubElement(cell, "div"), "span").text = job
cell.set("class", "job-name") cell.set("class", "job-name")
for ((module_name, class_name), class_results) in sorted( for (module_name, class_name), class_results in sorted(
results_by_module_and_class.items() results_by_module_and_class.items()
): ):
if multiple_modules: if multiple_modules:
@ -220,7 +220,7 @@ def build_test_table(jobs: List[str], results: List[CaseResult]) -> ET.Element:
# One row for each test: # One row for each test:
results_by_test = group_by(class_results, key=lambda r: r.test_name) results_by_test = group_by(class_results, key=lambda r: r.test_name)
for (test_name, test_results) in sorted(results_by_test.items()): for test_name, test_results in sorted(results_by_test.items()):
row_anchor = f"{qualified_class_name}.{test_name}" row_anchor = f"{qualified_class_name}.{test_name}"
if len(row_anchor) >= 50: if len(row_anchor) >= 50:
# Too long; give up on generating readable URL # Too long; give up on generating readable URL
@ -314,7 +314,7 @@ def write_html_pages(
pages = [] pages = []
for (module_name, module_results) in sorted(results_by_module.items()): for module_name, module_results in sorted(results_by_module.items()):
# Filter out client jobs if this is a server test module, and vice versa # Filter out client jobs if this is a server test module, and vice versa
module_categories = { module_categories = {
job_categories[result.job] job_categories[result.job]
@ -366,7 +366,7 @@ def write_html_index(output_dir: Path, pages: List[Tuple[str, str, str]]) -> Non
module_pages = [] module_pages = []
job_pages = [] job_pages = []
for (page_type, title, file_name) in sorted(pages): for page_type, title, file_name in sorted(pages):
if page_type == "module": if page_type == "module":
module_pages.append((title, file_name)) module_pages.append((title, file_name))
elif page_type == "job": elif page_type == "job":
@ -379,7 +379,7 @@ def write_html_index(output_dir: Path, pages: List[Tuple[str, str, str]]) -> Non
dl = ET.SubElement(body, "dl") dl = ET.SubElement(body, "dl")
dl.set("class", "module-index") dl.set("class", "module-index")
for (module_name, file_name) in sorted(module_pages): for module_name, file_name in sorted(module_pages):
module = importlib.import_module(module_name) module = importlib.import_module(module_name)
link = ET.SubElement(ET.SubElement(dl, "dt"), "a", href=f"./{file_name}") link = ET.SubElement(ET.SubElement(dl, "dt"), "a", href=f"./{file_name}")
@ -391,7 +391,7 @@ def write_html_index(output_dir: Path, pages: List[Tuple[str, str, str]]) -> Non
ul = ET.SubElement(body, "ul") ul = ET.SubElement(body, "ul")
ul.set("class", "job-index") ul.set("class", "job-index")
for (job, file_name) in sorted(job_pages): for job, file_name in sorted(job_pages):
link = ET.SubElement(ET.SubElement(ul, "li"), "a", href=f"./{file_name}") link = ET.SubElement(ET.SubElement(ul, "li"), "a", href=f"./{file_name}")
link.text = job link.text = job

View File

@ -18,7 +18,7 @@ class Artifact:
download_url: str download_url: str
@property @property
def public_download_url(self): def public_download_url(self) -> str:
# GitHub API is not available publicly for artifacts, we need to use # GitHub API is not available publicly for artifacts, we need to use
# a third-party proxy to access it... # a third-party proxy to access it...
name = urllib.parse.quote(self.name) name = urllib.parse.quote(self.name)

View File

@ -152,7 +152,7 @@ def match_dict(
# Set to not-None if we find a Keys() operator in the dict keys # Set to not-None if we find a Keys() operator in the dict keys
remaining_keys_wildcard = None remaining_keys_wildcard = None
for (expected_key, expected_value) in expected.items(): for expected_key, expected_value in expected.items():
if isinstance(expected_key, RemainingKeys): if isinstance(expected_key, RemainingKeys):
remaining_keys_wildcard = (expected_key.key, expected_value) remaining_keys_wildcard = (expected_key.key, expected_value)
else: else:
@ -168,7 +168,7 @@ def match_dict(
if remaining_keys_wildcard: if remaining_keys_wildcard:
(expected_key, expected_value) = remaining_keys_wildcard (expected_key, expected_value) = remaining_keys_wildcard
for (key, value) in got.items(): for key, value in got.items():
if not match_string(key, expected_key): if not match_string(key, expected_key):
return False return False
if not match_string(value, expected_value): if not match_string(value, expected_value):

View File

@ -263,7 +263,6 @@ def upload_steps(software_id):
def generate_workflow(config: dict, version_flavor: VersionFlavor): def generate_workflow(config: dict, version_flavor: VersionFlavor):
on: dict on: dict
if version_flavor == VersionFlavor.STABLE: if version_flavor == VersionFlavor.STABLE:
on = {"push": None, "pull_request": None} on = {"push": None, "pull_request": None}

View File

@ -12,6 +12,9 @@ disallow_untyped_defs = False
[mypy-irctest.client_tests.*] [mypy-irctest.client_tests.*]
disallow_untyped_defs = False disallow_untyped_defs = False
[mypy-irctest.self_tests.*]
disallow_untyped_defs = False
[mypy-defusedxml.*] [mypy-defusedxml.*]
ignore_missing_imports = True ignore_missing_imports = True

View File

@ -42,7 +42,7 @@ def partial_compaction(d):
# tests separate # tests separate
compacted_d = {} compacted_d = {}
successes = [] successes = []
for (k, v) in d.items(): for k, v in d.items():
if isinstance(v, CompactedResult) and v.success and v.nb_skipped == 0: if isinstance(v, CompactedResult) and v.success and v.nb_skipped == 0:
successes.append((k, v)) successes.append((k, v))
else: else: