Datasets:

Modalities: Tabular, Text
Formats: JSON
Size: < 1K
Libraries: Datasets, Dask
File size: 60,005 bytes
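Below is one sample row from the dataset. As a minimal sketch, a JSON-lines dataset like this could be loaded with the `datasets` library listed above; the Hub repo id and local file name here are placeholders, not taken from this page, and the field names are assumed to match the sample row.

```python
# Minimal sketch (assumptions: the data ships as JSON lines, field names match
# the sample row below; "<org>/<dataset>" and "data.jsonl" are placeholders).
from datasets import load_dataset

# From the Hub (substitute the real repo id) ...
# ds = load_dataset("<org>/<dataset>", split="train")
# ... or from a local JSON-lines file:
ds = load_dataset("json", data_files="data.jsonl", split="train")

row = ds[0]
print(row["repo_owner"], row["repo_name"], row["workflow_name"])
```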
{"language": "Python", "id": 8, "repo_owner": "scrapy", "repo_name": "scrapy", "head_branch": "simplify-attempt2", "workflow_name": "Checks", "workflow_filename": "checks.yml", "workflow_path": ".github/workflows/checks.yml", "contributor": "monicaq21", "sha_fail": "b15d4bd9177149b88d1b0f719e7e6290df81fe9a", "sha_success": "f629f8db5fa0bb8f46fbf64264c1a2d0dec55cfc", "workflow": "name: Checks\non: [push, pull_request]\n\nconcurrency:\n  group: ${{github.workflow}}-${{ github.ref }}\n  cancel-in-progress: true\n\njobs:\n  checks:\n    runs-on: ubuntu-latest\n    strategy:\n      fail-fast: false\n      matrix:\n        include:\n        - python-version: \"3.12\"\n          env:\n            TOXENV: pylint\n        - python-version: 3.8\n          env:\n            TOXENV: typing\n        - python-version: \"3.11\"  # Keep in sync with .readthedocs.yml\n          env:\n            TOXENV: docs\n        - python-version: \"3.12\"\n          env:\n            TOXENV: twinecheck\n\n    steps:\n    - uses: actions/checkout@v4\n\n    - name: Set up Python ${{ matrix.python-version }}\n      uses: actions/setup-python@v4\n      with:\n        python-version: ${{ matrix.python-version }}\n\n    - name: Run check\n      env: ${{ matrix.env }}\n      run: |\n        pip install -U tox\n        tox\n\n  pre-commit:\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v4\n    - uses: pre-commit/[email protected]\n", "logs": [{"step_name": "pre-commit/3_Run [email protected]", "log": "##[group]Run pre-commit/[email protected]\nwith:\n  extra_args: --all-files\n##[endgroup]\n##[group]Run python -m pip install pre-commit\n\u001b[36;1mpython -m pip install pre-commit\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\n##[endgroup]\nDefaulting to user installation because normal site-packages is not writeable\nCollecting pre-commit\n  Downloading pre_commit-3.5.0-py2.py3-none-any.whl (203 kB)\n     \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 203.7/203.7 KB 8.3 MB/s eta 0:00:00\nCollecting nodeenv>=0.11.1\n  Downloading nodeenv-1.8.0-py2.py3-none-any.whl (22 kB)\nRequirement already satisfied: pyyaml>=5.1 in /usr/lib/python3/dist-packages (from pre-commit) (5.4.1)\nCollecting identify>=1.0.0\n  Downloading identify-2.5.32-py2.py3-none-any.whl (98 kB)\n     \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 98.9/98.9 KB 12.6 MB/s eta 0:00:00\nCollecting cfgv>=2.0.0\n  Downloading cfgv-3.4.0-py2.py3-none-any.whl (7.2 kB)\nCollecting virtualenv>=20.10.0\n  Downloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n     \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 27.4 MB/s eta 0:00:00\nRequirement already satisfied: setuptools in /usr/lib/python3/dist-packages (from nodeenv>=0.11.1->pre-commit) (59.6.0)\nCollecting platformdirs<5,>=3.9.1\n  Downloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nCollecting filelock<4,>=3.12.2\n  Downloading filelock-3.13.1-py3-none-any.whl (11 kB)\nCollecting 
distlib<1,>=0.3.7\n  Downloading distlib-0.3.7-py2.py3-none-any.whl (468 kB)\n     \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 KB 43.3 MB/s eta 0:00:00\nInstalling collected packages: distlib, platformdirs, nodeenv, identify, filelock, cfgv, virtualenv, pre-commit\nSuccessfully installed cfgv-3.4.0 distlib-0.3.7 filelock-3.13.1 identify-2.5.32 nodeenv-1.8.0 platformdirs-4.1.0 pre-commit-3.5.0 virtualenv-20.25.0\n##[group]Run python -m pip freeze --local\n\u001b[36;1mpython -m pip freeze --local\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\n##[endgroup]\nargcomplete==3.1.6\ncfgv==3.4.0\ndistlib==0.3.7\nfilelock==3.13.1\nidentify==2.5.32\nnodeenv==1.8.0\npackaging==23.2\npipx==1.2.1\nplatformdirs==4.1.0\npre-commit==3.5.0\nuserpath==1.9.1\nvirtualenv==20.25.0\n##[group]Run actions/cache@v3\nwith:\n  path: ~/.cache/pre-commit\n  key: pre-commit-3||7a8fe885594aed9a90fd5938b4bb49b65732538a44c08aad3d6ea69d9d0cf64c\n  enableCrossOsArchive: false\n  fail-on-cache-miss: false\n  lookup-only: false\n##[endgroup]\nCache Size: ~33 MB (34938818 B)\n[command]/usr/bin/tar -xf /home/runner/work/_temp/49db49b7-4865-4c29-942c-cd4505f101c6/cache.tzst -P -C /home/runner/work/scrapy/scrapy --use-compress-program unzstd\nCache restored successfully\nCache restored from key: pre-commit-3||7a8fe885594aed9a90fd5938b4bb49b65732538a44c08aad3d6ea69d9d0cf64c\n##[group]Run pre-commit run --show-diff-on-failure --color=always --all-files\n\u001b[36;1mpre-commit run --show-diff-on-failure --color=always --all-files\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\n##[endgroup]\nbandit...................................................................\u001b[42mPassed\u001b[m\nflake8...................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: flake8\u001b[m\n\u001b[2m- exit code: 1\u001b[m\n\n\u001b[1mscrapy/core/downloader/contextfactory.py\u001b[m\u001b[36m:\u001b[m182\u001b[36m:\u001b[m42\u001b[36m:\u001b[m \u001b[1m\u001b[31mE251\u001b[m unexpected spaces around keyword / parameter equals\n\u001b[1mscrapy/core/downloader/contextfactory.py\u001b[m\u001b[36m:\u001b[m182\u001b[36m:\u001b[m44\u001b[36m:\u001b[m \u001b[1m\u001b[31mE251\u001b[m unexpected spaces around keyword / parameter equals\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m51\u001b[36m:\u001b[m17\u001b[36m:\u001b[m \u001b[1m\u001b[31mW291\u001b[m trailing whitespace\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m55\u001b[36m:\u001b[m31\u001b[36m:\u001b[m \u001b[1m\u001b[31mW291\u001b[m trailing whitespace\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m57\u001b[36m:\u001b[m12\u001b[36m:\u001b[m \u001b[1m\u001b[31mW291\u001b[m trailing whitespace\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m63\u001b[36m:\u001b[m1\u001b[36m:\u001b[m \u001b[1m\u001b[31mE302\u001b[m expected 2 blank lines, found 1\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m180\u001b[36m:\u001b[m24\u001b[36m:\u001b[m \u001b[1m\u001b[31mW291\u001b[m trailing whitespace\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m191\u001b[36m:\u001b[m1\u001b[36m:\u001b[m \u001b[1m\u001b[31mW293\u001b[m blank line contains whitespace\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m193\u001b[36m:\u001b[m1\u001b[36m:\u001b[m 
\u001b[1m\u001b[31mE265\u001b[m block comment should start with '# '\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m198\u001b[36m:\u001b[m25\u001b[36m:\u001b[m \u001b[1m\u001b[31mW291\u001b[m trailing whitespace\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m213\u001b[36m:\u001b[m1\u001b[36m:\u001b[m \u001b[1m\u001b[31mE303\u001b[m too many blank lines (4)\n\u001b[1mtests/test_addons.py\u001b[m\u001b[36m:\u001b[m112\u001b[36m:\u001b[m41\u001b[36m:\u001b[m \u001b[1m\u001b[31mE999\u001b[m IndentationError: unindent does not match any outer indentation level\n\u001b[1mtests/test_utils_misc/__init__.py\u001b[m\u001b[36m:\u001b[m98\u001b[36m:\u001b[m5\u001b[36m:\u001b[m \u001b[1m\u001b[31mE301\u001b[m expected 1 blank line, found 0\n\u001b[1mtests/test_utils_misc/__init__.py\u001b[m\u001b[36m:\u001b[m125\u001b[36m:\u001b[m35\u001b[36m:\u001b[m \u001b[1m\u001b[31mF821\u001b[m undefined name 'settings'\n\u001b[1mtests/test_utils_misc/__init__.py\u001b[m\u001b[36m:\u001b[m133\u001b[36m:\u001b[m1\u001b[36m:\u001b[m \u001b[1m\u001b[31mW293\u001b[m blank line contains whitespace\n\nblack....................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: black\u001b[m\n\u001b[2m- exit code: 123\u001b[m\n\u001b[2m- files were modified by this hook\u001b[m\n\n\u001b[1mreformatted scrapy/addons.py\u001b[0m\n\u001b[1mreformatted scrapy/core/downloader/handlers/__init__.py\u001b[0m\n\u001b[1mreformatted scrapy/core/downloader/contextfactory.py\u001b[0m\n\u001b[1mreformatted scrapy/core/engine.py\u001b[0m\n\u001b[1mreformatted scrapy/crawler.py\u001b[0m\n\u001b[1mreformatted scrapy/utils/misc.py\u001b[0m\n\u001b[31merror: cannot format tests/test_addons.py: unindent does not match any outer indentation level (<tokenize>, line 112)\u001b[0m\n\u001b[1mreformatted tests/test_downloader_handlers.py\u001b[0m\n\u001b[1mreformatted tests/test_utils_misc/__init__.py\u001b[0m\n\n\u001b[1mOh no! 
\ud83d\udca5 \ud83d\udc94 \ud83d\udca5\u001b[0m\n\u001b[34m\u001b[1m8 files \u001b[0m\u001b[1mreformatted\u001b[0m, \u001b[34m332 files \u001b[0mleft unchanged, \u001b[31m1 file failed to reformat\u001b[0m.\n\nisort....................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: isort\u001b[m\n\u001b[2m- files were modified by this hook\u001b[m\n\nFixing /home/runner/work/scrapy/scrapy/scrapy/core/downloader/handlers/http10.py\nFixing /home/runner/work/scrapy/scrapy/scrapy/extensions/feedexport.py\n\nblacken-docs.............................................................\u001b[42mPassed\u001b[m\npre-commit hook(s) made changes.\nIf you are seeing this message in CI, reproduce locally with: `pre-commit run --all-files`.\nTo run `pre-commit` as part of git workflow, use `pre-commit install`.\nAll changes made by hooks:\n\u001b[1mdiff --git a/scrapy/addons.py b/scrapy/addons.py\u001b[m\n\u001b[1mindex a6efba3..cb1574b 100644\u001b[m\n\u001b[1m--- a/scrapy/addons.py\u001b[m\n\u001b[1m+++ b/scrapy/addons.py\u001b[m\n\u001b[36m@@ -33,9 +33,7 @@\u001b[m \u001b[mclass AddonManager:\u001b[m\n             try:\u001b[m\n                 addoncls = load_object(clspath)\u001b[m\n                 # changes create_instance call to build_from_settings\u001b[m\n\u001b[31m-                addon = build_from_settings(\u001b[m\n\u001b[31m-                    addoncls, settings=settings\u001b[m\n\u001b[31m-                )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m                addon = build_from_settings(addoncls, settings=settings)\u001b[m\n                 addon.update_settings(settings)\u001b[m\n                 self.addons.append(addon)\u001b[m\n             except NotConfigured as e:\u001b[m\n\u001b[1mdiff --git a/scrapy/core/downloader/contextfactory.py b/scrapy/core/downloader/contextfactory.py\u001b[m\n\u001b[1mindex 6d3ddd4..73a3d32 100644\u001b[m\n\u001b[1m--- a/scrapy/core/downloader/contextfactory.py\u001b[m\n\u001b[1m+++ b/scrapy/core/downloader/contextfactory.py\u001b[m\n\u001b[36m@@ -178,9 +178,7 @@\u001b[m \u001b[mdef load_context_factory_from_settings(settings, crawler):\u001b[m\n     except TypeError:\u001b[m\n         # use context factory defaults\u001b[m\n         # changes create_instance call to build_from_settings\u001b[m\n\u001b[31m-        context_factory = build_from_settings(\u001b[m\n\u001b[31m-            context_factory_cls, settings = settings\u001b[m\n\u001b[31m-        )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m        context_factory = build_from_settings(context_factory_cls, settings=settings)\u001b[m\n         # context_factory = create_instance(\u001b[m\n         #     objcls=context_factory_cls,\u001b[m\n         #     settings=settings,\u001b[m\n\u001b[1mdiff --git a/scrapy/core/downloader/handlers/__init__.py b/scrapy/core/downloader/handlers/__init__.py\u001b[m\n\u001b[1mindex d5b8ce8..fc597b0 100644\u001b[m\n\u001b[1m--- a/scrapy/core/downloader/handlers/__init__.py\u001b[m\n\u001b[1m+++ b/scrapy/core/downloader/handlers/__init__.py\u001b[m\n\u001b[36m@@ -56,9 +56,7 @@\u001b[m \u001b[mclass DownloadHandlers:\u001b[m\n             if skip_lazy and getattr(dhcls, \"lazy\", True):\u001b[m\n                 return None\u001b[m\n             # change create_instance call to build_from_settings\u001b[m\n\u001b[31m-            dh = build_from_settings(\u001b[m\n\u001b[31m-                dhcls, settings=self._crawler.settings\u001b[m\n\u001b[31m-            )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m            dh = 
build_from_settings(dhcls, settings=self._crawler.settings)\u001b[m\n             # dh = create_instance(\u001b[m\n             #     objcls=dhcls,\u001b[m\n             #     settings=self._crawler.settings,\u001b[m\n\u001b[1mdiff --git a/scrapy/core/downloader/handlers/http10.py b/scrapy/core/downloader/handlers/http10.py\u001b[m\n\u001b[1mindex 76faf7d..5c01ba4 100644\u001b[m\n\u001b[1m--- a/scrapy/core/downloader/handlers/http10.py\u001b[m\n\u001b[1m+++ b/scrapy/core/downloader/handlers/http10.py\u001b[m\n\u001b[36m@@ -1,6 +1,6 @@\u001b[m\n \"\"\"Download handlers for http and https schemes\u001b[m\n \"\"\"\u001b[m\n\u001b[31m-from scrapy.utils.misc import build_from_settings, build_from_crawler, load_object\u001b[m\n\u001b[32m+\u001b[m\u001b[32mfrom scrapy.utils.misc import build_from_crawler, build_from_settings, load_object\u001b[m\n from scrapy.utils.python import to_unicode\u001b[m\n \u001b[m\n \u001b[m\n\u001b[1mdiff --git a/scrapy/core/engine.py b/scrapy/core/engine.py\u001b[m\n\u001b[1mindex 81aacdf..281dc8a 100644\u001b[m\n\u001b[1m--- a/scrapy/core/engine.py\u001b[m\n\u001b[1m+++ b/scrapy/core/engine.py\u001b[m\n\u001b[36m@@ -358,9 +358,7 @@\u001b[m \u001b[mclass ExecutionEngine:\u001b[m\n             raise RuntimeError(f\"No free spider slot when opening {spider.name!r}\")\u001b[m\n         logger.info(\"Spider opened\", extra={\"spider\": spider})\u001b[m\n         nextcall = CallLaterOnce(self._next_request)\u001b[m\n\u001b[31m-        scheduler = build_from_crawler(\u001b[m\n\u001b[31m-            self.scheduler_cls, crawler=self.crawler\u001b[m\n\u001b[31m-        )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m        scheduler = build_from_crawler(self.scheduler_cls, crawler=self.crawler)\u001b[m\n         start_requests = yield self.scraper.spidermw.process_start_requests(\u001b[m\n             start_requests, spider\u001b[m\n         )\u001b[m\n\u001b[1mdiff --git a/scrapy/crawler.py b/scrapy/crawler.py\u001b[m\n\u001b[1mindex a1f699b..1280953 100644\u001b[m\n\u001b[1m--- a/scrapy/crawler.py\u001b[m\n\u001b[1m+++ b/scrapy/crawler.py\u001b[m\n\u001b[36m@@ -111,7 +111,8 @@\u001b[m \u001b[mclass Crawler:\u001b[m\n \u001b[m\n         # changes create_instance call to build_from_settings\u001b[m\n         self.request_fingerprinter = build_from_settings(\u001b[m\n\u001b[31m-            load_object(self.settings[\"REQUEST_FINGERPRINTER_CLASS\"]), settings=self.settings\u001b[m\n\u001b[32m+\u001b[m\u001b[32m            load_object(self.settings[\"REQUEST_FINGERPRINTER_CLASS\"]),\u001b[m\n\u001b[32m+\u001b[m\u001b[32m            settings=self.settings,\u001b[m\n         )\u001b[m\n \u001b[m\n         # self.request_fingerprinter = create_instance(\u001b[m\n\u001b[1mdiff --git a/scrapy/extensions/feedexport.py b/scrapy/extensions/feedexport.py\u001b[m\n\u001b[1mindex a4c8a47..62fb07e 100644\u001b[m\n\u001b[1m--- a/scrapy/extensions/feedexport.py\u001b[m\n\u001b[1m+++ b/scrapy/extensions/feedexport.py\u001b[m\n\u001b[36m@@ -28,7 +28,7 @@\u001b[m \u001b[mfrom scrapy.utils.defer import maybe_deferred_to_future\u001b[m\n from scrapy.utils.deprecate import create_deprecated_class\u001b[m\n from scrapy.utils.ftp import ftp_store_file\u001b[m\n from scrapy.utils.log import failure_to_exc_info\u001b[m\n\u001b[31m-from scrapy.utils.misc import build_from_settings, build_from_crawler, load_object\u001b[m\n\u001b[32m+\u001b[m\u001b[32mfrom scrapy.utils.misc import build_from_crawler, build_from_settings, load_object\u001b[m\n from scrapy.utils.python import without_none_values\u001b[m\n 
\u001b[m\n logger = logging.getLogger(__name__)\u001b[m\n\u001b[1mdiff --git a/scrapy/utils/misc.py b/scrapy/utils/misc.py\u001b[m\n\u001b[1mindex 613328a..63270e9 100644\u001b[m\n\u001b[1m--- a/scrapy/utils/misc.py\u001b[m\n\u001b[1m+++ b/scrapy/utils/misc.py\u001b[m\n\u001b[36m@@ -47,19 +47,21 @@\u001b[m \u001b[mdef arg_to_iter(arg: Any) -> Iterable[Any]:\u001b[m\n         return cast(Iterable[Any], arg)\u001b[m\n     return [arg]\u001b[m\n \u001b[m\n\u001b[32m+\u001b[m\n # def build_from_crawler(objcls, crawler, none, *args, **kwargs):\u001b[m\n\u001b[31m-#     if crawler \u001b[m\n\u001b[32m+\u001b[m\u001b[32m#     if crawler\u001b[m\n #         if hasattr(objcls, \"from_crawler\"):\u001b[m\n #             instance = objcls.from_crawler(crawler, *args, **kwargs)\u001b[m\n #             method_name = \"from_crawler\"\u001b[m\n\u001b[31m-#         if instance is None: \u001b[m\n\u001b[32m+\u001b[m\u001b[32m#         if instance is None:\u001b[m\n #             raise TypeError(f\"{objcls.__qualname__}.{method_name} returned None)\u001b[m\n\u001b[31m-#     else: \u001b[m\n\u001b[32m+\u001b[m\u001b[32m#     else:\u001b[m\n #         instance = objcls(*args, **kwargs)\u001b[m\n #         method_name = \"__new__\"\u001b[m\n \u001b[m\n #     return instance\u001b[m\n \u001b[m\n\u001b[32m+\u001b[m\n def load_object(path: Union[str, Callable]) -> Any:\u001b[m\n     \"\"\"Load an object given its absolute object path, and return it.\u001b[m\n \u001b[m\n\u001b[36m@@ -177,7 +179,7 @@\u001b[m \u001b[mdef rel_has_nofollow(rel: Optional[str]) -> bool:\u001b[m\n #  Raises typeError is instance is None\u001b[m\n # Creates a class instance using 'from_crawler' constructor\u001b[m\n def build_from_crawler(objcls, crawler, /, *args, **kwargs):\u001b[m\n\u001b[31m-    if crawler is None: \u001b[m\n\u001b[32m+\u001b[m\u001b[32m    if crawler is None:\u001b[m\n         raise ValueError(\"Specify crawler.\")\u001b[m\n     if crawler and hasattr(objcls, \"from_crawler\"):\u001b[m\n         instance = objcls.from_crawler(crawler, *args, **kwargs)\u001b[m\n\u001b[36m@@ -188,14 +190,14 @@\u001b[m \u001b[mdef build_from_crawler(objcls, crawler, /, *args, **kwargs):\u001b[m\n     if instance is None:\u001b[m\n         raise TypeError(f\"{objcls.__qualname__}.{method_name} returned None\")\u001b[m\n     return instance\u001b[m\n\u001b[31m-            \u001b[m\n \u001b[m\n\u001b[31m-#``*args`` and ``**kwargs`` are forwarded to the constructors.\u001b[m\n\u001b[32m+\u001b[m\n\u001b[32m+\u001b[m\u001b[32m# ``*args`` and ``**kwargs`` are forwarded to the constructors.\u001b[m\n # Raises ``ValueError`` if``settings`` is``None``.\u001b[m\n #  Raises typeError is instance is None\u001b[m\n # Creates a class instance using 'from_settings' constructor\u001b[m\n def build_from_settings(objcls, settings, /, *args, **kwargs):\u001b[m\n\u001b[31m-    if settings is None: \u001b[m\n\u001b[32m+\u001b[m\u001b[32m    if settings is None:\u001b[m\n         raise ValueError(\"Specify settings.\")\u001b[m\n     if settings and hasattr(objcls, \"from_settings\"):\u001b[m\n         instance = objcls.from_settings(settings, *args, **kwargs)\u001b[m\n\u001b[36m@@ -208,8 +210,6 @@\u001b[m \u001b[mdef build_from_settings(objcls, settings, /, *args, **kwargs):\u001b[m\n     return instance\u001b[m\n \u001b[m\n \u001b[m\n\u001b[31m-\u001b[m\n\u001b[31m-\u001b[m\n @contextmanager\u001b[m\n def set_environ(**kwargs: str) -> Generator[None, Any, None]:\u001b[m\n     \"\"\"Temporarily set environment variables inside the context manager 
and\u001b[m\n\u001b[1mdiff --git a/tests/test_downloader_handlers.py b/tests/test_downloader_handlers.py\u001b[m\n\u001b[1mindex 8595e68..37a5364 100644\u001b[m\n\u001b[1m--- a/tests/test_downloader_handlers.py\u001b[m\n\u001b[1m+++ b/tests/test_downloader_handlers.py\u001b[m\n\u001b[36m@@ -669,9 +669,7 @@\u001b[m \u001b[mclass Https11CustomCiphers(unittest.TestCase):\u001b[m\n         crawler = get_crawler(\u001b[m\n             settings_dict={\"DOWNLOADER_CLIENT_TLS_CIPHERS\": \"CAMELLIA256-SHA\"}\u001b[m\n         )\u001b[m\n\u001b[31m-        self.download_handler = build_from_crawler(\u001b[m\n\u001b[31m-            self.download_handler_cls, crawler\u001b[m\n\u001b[31m-        )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m        self.download_handler = build_from_crawler(self.download_handler_cls, crawler)\u001b[m\n         self.download_request = self.download_handler.download_request\u001b[m\n \u001b[m\n     @defer.inlineCallbacks\u001b[m\n\u001b[36m@@ -1036,9 +1034,7 @@\u001b[m \u001b[mclass BaseFTPTestCase(unittest.TestCase):\u001b[m\n         self.port = reactor.listenTCP(0, self.factory, interface=\"127.0.0.1\")\u001b[m\n         self.portNum = self.port.getHost().port\u001b[m\n         crawler = get_crawler()\u001b[m\n\u001b[31m-        self.download_handler = build_from_crawler(\u001b[m\n\u001b[31m-            FTPDownloadHandler, crawler\u001b[m\n\u001b[31m-        )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m        self.download_handler = build_from_crawler(FTPDownloadHandler, crawler)\u001b[m\n         self.addCleanup(self.port.stopListening)\u001b[m\n \u001b[m\n     def tearDown(self):\u001b[m\n\u001b[36m@@ -1182,9 +1178,7 @@\u001b[m \u001b[mclass AnonymousFTPTestCase(BaseFTPTestCase):\u001b[m\n         self.port = reactor.listenTCP(0, self.factory, interface=\"127.0.0.1\")\u001b[m\n         self.portNum = self.port.getHost().port\u001b[m\n         crawler = get_crawler()\u001b[m\n\u001b[31m-        self.download_handler = build_from_crawler(\u001b[m\n\u001b[31m-            FTPDownloadHandler, crawler\u001b[m\n\u001b[31m-        )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m        self.download_handler = build_from_crawler(FTPDownloadHandler, crawler)\u001b[m\n         self.addCleanup(self.port.stopListening)\u001b[m\n \u001b[m\n     def tearDown(self):\u001b[m\n\u001b[36m@@ -1194,9 +1188,7 @@\u001b[m \u001b[mclass AnonymousFTPTestCase(BaseFTPTestCase):\u001b[m\n class DataURITestCase(unittest.TestCase):\u001b[m\n     def setUp(self):\u001b[m\n         crawler = get_crawler()\u001b[m\n\u001b[31m-        self.download_handler = build_from_crawler(\u001b[m\n\u001b[31m-            DataURIDownloadHandler, crawler\u001b[m\n\u001b[31m-        )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m        self.download_handler = build_from_crawler(DataURIDownloadHandler, crawler)\u001b[m\n         self.download_request = self.download_handler.download_request\u001b[m\n         self.spider = Spider(\"foo\")\u001b[m\n \u001b[m\n\u001b[1mdiff --git a/tests/test_utils_misc/__init__.py b/tests/test_utils_misc/__init__.py\u001b[m\n\u001b[1mindex ccf8022..115ee7e 100644\u001b[m\n\u001b[1m--- a/tests/test_utils_misc/__init__.py\u001b[m\n\u001b[1m+++ b/tests/test_utils_misc/__init__.py\u001b[m\n\u001b[36m@@ -95,6 +95,7 @@\u001b[m \u001b[mclass UtilsMiscTestCase(unittest.TestCase):\u001b[m\n         self.assertEqual(\u001b[m\n             list(arg_to_iter(TestItem(name=\"john\"))), [TestItem(name=\"john\")]\u001b[m\n         )\u001b[m\n\u001b[32m+\u001b[m\n     def test_build_from_crawler(self):\u001b[m\n         
crawler = mock.MagicMock(spec_set=[\"settings\"])\u001b[m\n         args = (True, 100.0)\u001b[m\n\u001b[36m@@ -130,7 +131,7 @@\u001b[m \u001b[mclass UtilsMiscTestCase(unittest.TestCase):\u001b[m\n         m.from_crawler.return_value = None\u001b[m\n         with self.assertRaises(TypeError):\u001b[m\n             build_from_crawler(m, crawler, *args, **kwargs)\u001b[m\n\u001b[31m-    \u001b[m\n\u001b[32m+\u001b[m\n     def test_build_from_settings(self):\u001b[m\n         settings = mock.MagicMock()\u001b[m\n         args = (True, 100.0)\u001b[m\n##[error]Process completed with exit code 1.\n"}, {"step_name": "checks (3.12, pylint)/4_Run check.txt", "log": "##[group]Run pip install -U tox\n\u001b[36;1mpip install -U tox\u001b[0m\n\u001b[36;1mtox\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n  pythonLocation: /opt/hostedtoolcache/Python/3.12.0/x64\n  PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib/pkgconfig\n  Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n  Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n  Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n  LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib\n  TOXENV: pylint\n##[endgroup]\nCollecting tox\n  Downloading tox-4.11.4-py3-none-any.whl.metadata (5.0 kB)\nCollecting cachetools>=5.3.1 (from tox)\n  Downloading cachetools-5.3.2-py3-none-any.whl.metadata (5.2 kB)\nCollecting chardet>=5.2 (from tox)\n  Downloading chardet-5.2.0-py3-none-any.whl.metadata (3.4 kB)\nCollecting colorama>=0.4.6 (from tox)\n  Downloading colorama-0.4.6-py2.py3-none-any.whl (25 kB)\nCollecting filelock>=3.12.3 (from tox)\n  Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting packaging>=23.1 (from tox)\n  Downloading packaging-23.2-py3-none-any.whl.metadata (3.2 kB)\nCollecting platformdirs>=3.10 (from tox)\n  Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nCollecting pluggy>=1.3 (from tox)\n  Downloading pluggy-1.3.0-py3-none-any.whl.metadata (4.3 kB)\nCollecting pyproject-api>=1.6.1 (from tox)\n  Downloading pyproject_api-1.6.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting virtualenv>=20.24.3 (from tox)\n  Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.24.3->tox)\n  Downloading distlib-0.3.7-py2.py3-none-any.whl.metadata (5.1 kB)\nDownloading tox-4.11.4-py3-none-any.whl (153 kB)\n   \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 154.0/154.0 kB 24.9 MB/s eta 0:00:00\nDownloading cachetools-5.3.2-py3-none-any.whl (9.3 kB)\nDownloading chardet-5.2.0-py3-none-any.whl (199 kB)\n   \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 199.4/199.4 kB 43.0 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading packaging-23.2-py3-none-any.whl (53 kB)\n   \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 53.0/53.0 kB 15.5 MB/s eta 0:00:00\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nDownloading 
pluggy-1.3.0-py3-none-any.whl (18 kB)\nDownloading pyproject_api-1.6.1-py3-none-any.whl (12 kB)\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n   \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 86.7 MB/s eta 0:00:00\nDownloading distlib-0.3.7-py2.py3-none-any.whl (468 kB)\n   \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 75.2 MB/s eta 0:00:00\nInstalling collected packages: distlib, pluggy, platformdirs, packaging, filelock, colorama, chardet, cachetools, virtualenv, pyproject-api, tox\nSuccessfully installed cachetools-5.3.2 chardet-5.2.0 colorama-0.4.6 distlib-0.3.7 filelock-3.13.1 packaging-23.2 platformdirs-4.1.0 pluggy-1.3.0 pyproject-api-1.6.1 tox-4.11.4 virtualenv-20.25.0\n\n[notice] A new release of pip is available: 23.2.1 -> 23.3.1\n[notice] To update, run: pip install --upgrade pip\npylint: install_deps> python -I -m pip install -ctests/upper-constraints.txt boto3 google-cloud-storage 'markupsafe<2.1.0' 'mitmproxy<8,>=4.0.4; python_version < \"3.9\" and implementation_name != \"pypy\"' Pillow pylint==3.0.1 robotexclusionrulesparser 'Twisted[http2]' -r tests/requirements.txt\n.pkg: install_requires> python -I -m pip install 'setuptools>=40.8.0' wheel\n.pkg: _optional_hooks> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_sdist> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_wheel> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: install_requires_for_build_wheel> python -I -m pip install wheel\n.pkg: freeze> python -m pip freeze --all\n.pkg: pip==23.3.1,setuptools==69.0.2,wheel==0.42.0\n.pkg: prepare_metadata_for_build_wheel> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: build_sdist> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\npylint: install_package_deps> python -I -m pip install -ctests/upper-constraints.txt 'PyDispatcher>=2.0.5; platform_python_implementation == \"CPython\"' 'PyPyDispatcher>=2.1.0; platform_python_implementation == \"PyPy\"' 'Twisted>=18.9.0' 'cryptography>=36.0.0' 'cssselect>=0.9.1' 'itemadapter>=0.1.0' 'itemloaders>=1.0.1' 'lxml>=4.4.1' packaging 'parsel>=1.5.0' 'protego>=0.1.15' 'pyOpenSSL>=21.0.0' 'queuelib>=1.4.2' 'service-identity>=18.1.0' setuptools tldextract 'w3lib>=1.17.0' 'zope.interface>=5.1.0'\npylint: install_package> python -I -m pip install -ctests/upper-constraints.txt --force-reinstall --no-deps /home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz\npylint: freeze> python -m pip freeze --all\npylint: 
astroid==3.0.1,asttokens==2.4.1,attrs==23.1.0,Automat==22.10.0,blessed==1.20.0,boto3==1.33.7,botocore==1.33.7,bpython==0.24,Brotli==1.1.0,cachetools==5.3.2,certifi==2023.11.17,cffi==1.16.0,charset-normalizer==3.3.2,constantly==23.10.4,coverage==7.3.2,cryptography==41.0.7,cssselect==1.2.0,curtsies==0.4.2,cwcwidth==0.1.9,decorator==5.1.1,dill==0.3.7,execnet==2.0.2,executing==2.0.1,filelock==3.13.1,google-api-core==2.14.0,google-auth==2.24.0,google-cloud-core==2.3.3,google-cloud-storage==2.13.0,google-crc32c==1.5.0,google-resumable-media==2.6.0,googleapis-common-protos==1.61.0,greenlet==3.0.1,h2==4.1.0,hpack==4.0.0,hyperframe==6.0.1,hyperlink==21.0.0,idna==3.6,incremental==22.10.0,iniconfig==2.0.0,ipython==8.18.1,isort==5.12.0,itemadapter==0.8.0,itemloaders==1.1.0,jedi==0.19.1,jmespath==1.0.1,lxml==4.9.3,MarkupSafe==2.0.1,matplotlib-inline==0.1.6,mccabe==0.7.0,packaging==23.2,parsel==1.8.1,parso==0.8.3,pexpect==4.9.0,Pillow==10.1.0,pip==23.3.1,platformdirs==4.1.0,pluggy==1.3.0,priority==1.3.0,prompt-toolkit==3.0.41,Protego==0.3.0,protobuf==4.25.1,ptyprocess==0.7.0,pure-eval==0.2.2,pyasn1==0.5.1,pyasn1-modules==0.3.0,pycparser==2.21,PyDispatcher==2.0.7,pyftpdlib==1.5.9,Pygments==2.17.2,pylint==3.0.1,pyOpenSSL==23.3.0,pytest==7.4.3,pytest-cov==4.0.0,pytest-xdist==3.5.0,python-dateutil==2.8.2,pyxdg==0.28,queuelib==1.6.2,requests==2.31.0,requests-file==1.5.1,robotexclusionrulesparser==1.7.1,rsa==4.9,s3transfer==0.8.2,Scrapy @ file:///home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz#sha256=95dfd13c5691d88425ffee44c03ee9a12098077f4caac9cff2b4fc70ff9404ec,service-identity==23.1.0,setuptools==69.0.2,six==1.16.0,stack-data==0.6.3,sybil==6.0.2,testfixtures==7.2.2,tldextract==5.1.1,tomlkit==0.12.3,traitlets==5.14.0,Twisted==23.10.0,typing_extensions==4.8.0,urllib3==2.0.7,uvloop==0.19.0,w3lib==2.1.2,wcwidth==0.2.12,zope.interface==6.1,zstandard==0.22.0\npylint: commands[0]> pylint conftest.py docs extras scrapy setup.py tests\n<unknown>:230: SyntaxWarning: invalid escape sequence '\\d'\n************* Module tests.test_addons\ntests/test_addons.py:112:40: E0001: Parsing failed: 'unindent does not match any outer indentation level (<unknown>, line 112)' (syntax-error)\n************* Module scrapy.addons\nscrapy/addons.py:36:24: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.crawler\nscrapy/crawler.py:113:37: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.engine\nscrapy/core/engine.py:361:20: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.scheduler\nscrapy/core/scheduler.py:325:15: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/scheduler.py:336:12: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.downloader.contextfactory\nscrapy/core/downloader/contextfactory.py:169:26: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists 
(kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/contextfactory.py:181:26: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.downloader.handlers\nscrapy/core/downloader/handlers/__init__.py:59:17: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.downloader.handlers.http10\nscrapy/core/downloader/handlers/http10.py:34:41: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/http10.py:34:41: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/http10.py:39:41: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/http10.py:39:41: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.downloader.handlers.s3\nscrapy/core/downloader/handlers/s3.py:54:28: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/s3.py:54:28: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/s3.py:59:28: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/s3.py:59:28: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.utils.misc\nscrapy/utils/misc.py:51:16: C0303: Trailing whitespace (trailing-whitespace)\nscrapy/utils/misc.py:55:30: C0303: Trailing whitespace (trailing-whitespace)\nscrapy/utils/misc.py:57:11: C0303: Trailing whitespace (trailing-whitespace)\nscrapy/utils/misc.py:180:23: C0303: Trailing whitespace (trailing-whitespace)\nscrapy/utils/misc.py:191:0: C0303: Trailing whitespace (trailing-whitespace)\nscrapy/utils/misc.py:198:24: C0303: Trailing whitespace (trailing-whitespace)\n************* Module tests.test_downloader_handlers\ntests/test_downloader_handlers.py:833:22: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:833:22: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:863:17: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:863:17: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists 
(kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:890:12: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:890:12: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module tests.test_utils_misc.__init__\ntests/test_utils_misc/__init__.py:133:0: C0303: Trailing whitespace (trailing-whitespace)\n\n-----------------------------------\nYour code has been rated at 9.99/10\n\npylint: exit 22 (57.23 seconds) /home/runner/work/scrapy/scrapy> pylint conftest.py docs extras scrapy setup.py tests pid=2046\n.pkg: _exit> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n  pylint: FAIL code 22 (101.48=setup[44.25]+cmd[57.23] seconds)\n  evaluation failed :( (101.60 seconds)\n##[error]Process completed with exit code 22.\n"}, {"step_name": "checks (3.8, typing)/4_Run check.txt", "log": "##[group]Run pip install -U tox\n\u001b[36;1mpip install -U tox\u001b[0m\n\u001b[36;1mtox\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n  pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n  PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n  Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n  Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n  Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n  LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n  TOXENV: typing\n##[endgroup]\nCollecting tox\n  Downloading tox-4.11.4-py3-none-any.whl.metadata (5.0 kB)\nCollecting cachetools>=5.3.1 (from tox)\n  Downloading cachetools-5.3.2-py3-none-any.whl.metadata (5.2 kB)\nCollecting chardet>=5.2 (from tox)\n  Downloading chardet-5.2.0-py3-none-any.whl.metadata (3.4 kB)\nCollecting colorama>=0.4.6 (from tox)\n  Downloading colorama-0.4.6-py2.py3-none-any.whl (25 kB)\nCollecting filelock>=3.12.3 (from tox)\n  Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting packaging>=23.1 (from tox)\n  Downloading packaging-23.2-py3-none-any.whl.metadata (3.2 kB)\nCollecting platformdirs>=3.10 (from tox)\n  Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nCollecting pluggy>=1.3 (from tox)\n  Downloading pluggy-1.3.0-py3-none-any.whl.metadata (4.3 kB)\nCollecting pyproject-api>=1.6.1 (from tox)\n  Downloading pyproject_api-1.6.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting tomli>=2.0.1 (from tox)\n  Downloading tomli-2.0.1-py3-none-any.whl (12 kB)\nCollecting virtualenv>=20.24.3 (from tox)\n  Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.24.3->tox)\n  Downloading distlib-0.3.7-py2.py3-none-any.whl.metadata (5.1 kB)\nDownloading tox-4.11.4-py3-none-any.whl (153 kB)\n   \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 154.0/154.0 kB 18.9 MB/s eta 0:00:00\nDownloading cachetools-5.3.2-py3-none-any.whl (9.3 kB)\nDownloading chardet-5.2.0-py3-none-any.whl (199 kB)\n   
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 199.4/199.4 kB 47.2 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading packaging-23.2-py3-none-any.whl (53 kB)\n   \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 53.0/53.0 kB 15.7 MB/s eta 0:00:00\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nDownloading pluggy-1.3.0-py3-none-any.whl (18 kB)\nDownloading pyproject_api-1.6.1-py3-none-any.whl (12 kB)\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n   \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 102.9 MB/s eta 0:00:00\nDownloading distlib-0.3.7-py2.py3-none-any.whl (468 kB)\n   \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 75.6 MB/s eta 0:00:00\nInstalling collected packages: distlib, tomli, pluggy, platformdirs, packaging, filelock, colorama, chardet, cachetools, virtualenv, pyproject-api, tox\nSuccessfully installed cachetools-5.3.2 chardet-5.2.0 colorama-0.4.6 distlib-0.3.7 filelock-3.13.1 packaging-23.2 platformdirs-4.1.0 pluggy-1.3.0 pyproject-api-1.6.1 tomli-2.0.1 tox-4.11.4 virtualenv-20.25.0\n\n[notice] A new release of pip is available: 23.0.1 -> 23.3.1\n[notice] To update, run: pip install --upgrade pip\ntyping: install_deps> python -I -m pip install -ctests/upper-constraints.txt mypy==1.6.1 types-attrs==19.1.0 types-lxml==2023.10.21 types-Pillow==10.1.0.0 types-Pygments==2.16.0.0 types-pyOpenSSL==23.3.0.0 types-setuptools==68.2.0.0 typing-extensions==4.8.0 'w3lib>=2.1.2'\n.pkg: install_requires> python -I -m pip install 'setuptools>=40.8.0' wheel\n.pkg: _optional_hooks> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_sdist> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_wheel> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: install_requires_for_build_wheel> python -I -m pip install wheel\n.pkg: freeze> python -m pip freeze --all\n.pkg: pip==23.3.1,setuptools==69.0.2,wheel==0.42.0\n.pkg: prepare_metadata_for_build_wheel> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: build_sdist> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\ntyping: install_package_deps> python -I -m pip install -ctests/upper-constraints.txt 'PyDispatcher>=2.0.5; platform_python_implementation == \"CPython\"' 'PyPyDispatcher>=2.1.0; 
platform_python_implementation == \"PyPy\"' 'Twisted>=18.9.0' 'cryptography>=36.0.0' 'cssselect>=0.9.1' 'itemadapter>=0.1.0' 'itemloaders>=1.0.1' 'lxml>=4.4.1' packaging 'parsel>=1.5.0' 'protego>=0.1.15' 'pyOpenSSL>=21.0.0' 'queuelib>=1.4.2' 'service-identity>=18.1.0' setuptools tldextract 'w3lib>=1.17.0' 'zope.interface>=5.1.0'\ntyping: install_package> python -I -m pip install -ctests/upper-constraints.txt --force-reinstall --no-deps /home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz\ntyping: freeze> python -m pip freeze --all\ntyping: attrs==23.1.0,Automat==22.10.0,certifi==2023.11.17,cffi==1.16.0,charset-normalizer==3.3.2,constantly==23.10.4,cryptography==41.0.7,cssselect==1.2.0,filelock==3.13.1,hyperlink==21.0.0,idna==3.6,incremental==22.10.0,itemadapter==0.8.0,itemloaders==1.1.0,jmespath==1.0.1,lxml==4.9.3,mypy==1.6.1,mypy-extensions==1.0.0,packaging==23.2,parsel==1.8.1,pip==23.3.1,Protego==0.3.0,pyasn1==0.5.1,pyasn1-modules==0.3.0,pycparser==2.21,PyDispatcher==2.0.7,pyOpenSSL==23.3.0,queuelib==1.6.2,requests==2.31.0,requests-file==1.5.1,Scrapy @ file:///home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz#sha256=10ffed5c2108f38048c2270acaf9e181014495f11170991e45b186156308910b,service-identity==23.1.0,setuptools==69.0.2,six==1.16.0,tldextract==5.1.1,tomli==2.0.1,Twisted==23.10.0,types-attrs==19.1.0,types-beautifulsoup4==4.12.0.7,types-docutils==0.20.0.3,types-html5lib==1.1.11.15,types-lxml==2023.10.21,types-Pillow==10.1.0.0,types-Pygments==2.16.0.0,types-pyOpenSSL==23.3.0.0,types-setuptools==68.2.0.0,typing_extensions==4.8.0,urllib3==2.1.0,w3lib==2.1.2,wheel==0.42.0,zope.interface==6.1\ntyping: commands[0]> mypy scrapy tests\ntests/test_addons.py:112: error: unindent does not match any outer indentation level  [syntax]\nFound 1 error in 1 file (errors prevented further checking)\ntyping: exit 2 (0.47 seconds) /home/runner/work/scrapy/scrapy> mypy scrapy tests pid=1974\n.pkg: _exit> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n  typing: FAIL code 2 (17.46=setup[16.99]+cmd[0.47] seconds)\n  evaluation failed :( (17.55 seconds)\n##[error]Process completed with exit code 2.\n"}], "diff": "diff --git a/scrapy/addons.py b/scrapy/addons.py\nindex a6efba362..cb1574b2f 100644\n--- a/scrapy/addons.py\n+++ b/scrapy/addons.py\n@@ -33,9 +33,7 @@ class AddonManager:\n             try:\n                 addoncls = load_object(clspath)\n                 # changes create_instance call to build_from_settings\n-                addon = build_from_settings(\n-                    addoncls, settings=settings\n-                )\n+                addon = build_from_settings(addoncls, settings=settings)\n                 addon.update_settings(settings)\n                 self.addons.append(addon)\n             except NotConfigured as e:\ndiff --git a/scrapy/core/downloader/contextfactory.py b/scrapy/core/downloader/contextfactory.py\nindex 6d3ddd4a4..73a3d32af 100644\n--- a/scrapy/core/downloader/contextfactory.py\n+++ b/scrapy/core/downloader/contextfactory.py\n@@ -178,9 +178,7 @@ def load_context_factory_from_settings(settings, crawler):\n     except TypeError:\n         # use context factory defaults\n         # changes create_instance call to build_from_settings\n-        context_factory = build_from_settings(\n-            context_factory_cls, settings = settings\n-        )\n+        context_factory = build_from_settings(context_factory_cls, settings=settings)\n         
# context_factory = create_instance(\n         #     objcls=context_factory_cls,\n         #     settings=settings,\ndiff --git a/scrapy/core/downloader/handlers/__init__.py b/scrapy/core/downloader/handlers/__init__.py\nindex d5b8ce89d..fc597b017 100644\n--- a/scrapy/core/downloader/handlers/__init__.py\n+++ b/scrapy/core/downloader/handlers/__init__.py\n@@ -56,9 +56,7 @@ class DownloadHandlers:\n             if skip_lazy and getattr(dhcls, \"lazy\", True):\n                 return None\n             # change create_instance call to build_from_settings\n-            dh = build_from_settings(\n-                dhcls, settings=self._crawler.settings\n-            )\n+            dh = build_from_settings(dhcls, settings=self._crawler.settings)\n             # dh = create_instance(\n             #     objcls=dhcls,\n             #     settings=self._crawler.settings,\ndiff --git a/scrapy/core/downloader/handlers/http10.py b/scrapy/core/downloader/handlers/http10.py\nindex 76faf7d2b..5c01ba410 100644\n--- a/scrapy/core/downloader/handlers/http10.py\n+++ b/scrapy/core/downloader/handlers/http10.py\n@@ -1,6 +1,6 @@\n \"\"\"Download handlers for http and https schemes\n \"\"\"\n-from scrapy.utils.misc import build_from_settings, build_from_crawler, load_object\n+from scrapy.utils.misc import build_from_crawler, build_from_settings, load_object\n from scrapy.utils.python import to_unicode\n \n \ndiff --git a/scrapy/core/engine.py b/scrapy/core/engine.py\nindex 81aacdf8d..281dc8a54 100644\n--- a/scrapy/core/engine.py\n+++ b/scrapy/core/engine.py\n@@ -358,9 +358,7 @@ class ExecutionEngine:\n             raise RuntimeError(f\"No free spider slot when opening {spider.name!r}\")\n         logger.info(\"Spider opened\", extra={\"spider\": spider})\n         nextcall = CallLaterOnce(self._next_request)\n-        scheduler = build_from_crawler(\n-            self.scheduler_cls, crawler=self.crawler\n-        )\n+        scheduler = build_from_crawler(self.scheduler_cls, crawler=self.crawler)\n         start_requests = yield self.scraper.spidermw.process_start_requests(\n             start_requests, spider\n         )\ndiff --git a/scrapy/crawler.py b/scrapy/crawler.py\nindex a1f699b7e..128095368 100644\n--- a/scrapy/crawler.py\n+++ b/scrapy/crawler.py\n@@ -111,7 +111,8 @@ class Crawler:\n \n         # changes create_instance call to build_from_settings\n         self.request_fingerprinter = build_from_settings(\n-            load_object(self.settings[\"REQUEST_FINGERPRINTER_CLASS\"]), settings=self.settings\n+            load_object(self.settings[\"REQUEST_FINGERPRINTER_CLASS\"]),\n+            settings=self.settings,\n         )\n \n         # self.request_fingerprinter = create_instance(\ndiff --git a/scrapy/extensions/feedexport.py b/scrapy/extensions/feedexport.py\nindex a4c8a470d..62fb07e19 100644\n--- a/scrapy/extensions/feedexport.py\n+++ b/scrapy/extensions/feedexport.py\n@@ -28,7 +28,7 @@ from scrapy.utils.defer import maybe_deferred_to_future\n from scrapy.utils.deprecate import create_deprecated_class\n from scrapy.utils.ftp import ftp_store_file\n from scrapy.utils.log import failure_to_exc_info\n-from scrapy.utils.misc import build_from_settings, build_from_crawler, load_object\n+from scrapy.utils.misc import build_from_crawler, build_from_settings, load_object\n from scrapy.utils.python import without_none_values\n \n logger = logging.getLogger(__name__)\ndiff --git a/scrapy/utils/misc.py b/scrapy/utils/misc.py\nindex 613328a2b..2e39aaaa3 100644\n--- a/scrapy/utils/misc.py\n+++ 
b/scrapy/utils/misc.py\n@@ -47,19 +47,21 @@ def arg_to_iter(arg: Any) -> Iterable[Any]:\n         return cast(Iterable[Any], arg)\n     return [arg]\n \n+\n # def build_from_crawler(objcls, crawler, none, *args, **kwargs):\n-#     if crawler \n+#     if crawler\n #         if hasattr(objcls, \"from_crawler\"):\n #             instance = objcls.from_crawler(crawler, *args, **kwargs)\n #             method_name = \"from_crawler\"\n-#         if instance is None: \n+#         if instance is None:\n #             raise TypeError(f\"{objcls.__qualname__}.{method_name} returned None)\n-#     else: \n+#     else:\n #         instance = objcls(*args, **kwargs)\n #         method_name = \"__new__\"\n \n #     return instance\n \n+\n def load_object(path: Union[str, Callable]) -> Any:\n     \"\"\"Load an object given its absolute object path, and return it.\n \n@@ -176,8 +178,8 @@ def rel_has_nofollow(rel: Optional[str]) -> bool:\n # Raises ``ValueError`` if``crawler`` is``None``.\n #  Raises typeError is instance is None\n # Creates a class instance using 'from_crawler' constructor\n-def build_from_crawler(objcls, crawler, /, *args, **kwargs):\n-    if crawler is None: \n+def build_from_crawler(objcls, crawler, *args, **kwargs):\n+    if crawler is None:\n         raise ValueError(\"Specify crawler.\")\n     if crawler and hasattr(objcls, \"from_crawler\"):\n         instance = objcls.from_crawler(crawler, *args, **kwargs)\n@@ -188,14 +190,14 @@ def build_from_crawler(objcls, crawler, /, *args, **kwargs):\n     if instance is None:\n         raise TypeError(f\"{objcls.__qualname__}.{method_name} returned None\")\n     return instance\n-            \n \n-#``*args`` and ``**kwargs`` are forwarded to the constructors.\n+\n+# ``*args`` and ``**kwargs`` are forwarded to the constructors.\n # Raises ``ValueError`` if``settings`` is``None``.\n #  Raises typeError is instance is None\n # Creates a class instance using 'from_settings' constructor\n-def build_from_settings(objcls, settings, /, *args, **kwargs):\n-    if settings is None: \n+def build_from_settings(objcls, settings, *args, **kwargs):\n+    if settings is None:\n         raise ValueError(\"Specify settings.\")\n     if settings and hasattr(objcls, \"from_settings\"):\n         instance = objcls.from_settings(settings, *args, **kwargs)\n@@ -208,8 +210,6 @@ def build_from_settings(objcls, settings, /, *args, **kwargs):\n     return instance\n \n \n-\n-\n @contextmanager\n def set_environ(**kwargs: str) -> Generator[None, Any, None]:\n     \"\"\"Temporarily set environment variables inside the context manager and\ndiff --git a/tests/test_addons.py b/tests/test_addons.py\nindex 355539162..5e390c49d 100644\n--- a/tests/test_addons.py\n+++ b/tests/test_addons.py\n@@ -109,7 +109,7 @@ class AddonManagerTest(unittest.TestCase):\n         self.assertIsInstance(manager.addons[0], CreateInstanceAddon)\n         self.assertEqual(crawler.settings.get(\"MYADDON_KEY\"), \"val\")\n \n-     def test_build_from_crawler(self):\n+    def test_build_from_crawler(self):\n         settings_dict = {\n             \"ADDONS\": {\"tests.test_addons.CreateInstanceAddon\": 0},\n             \"MYADDON\": {\"MYADDON_KEY\": \"val\"},\ndiff --git a/tests/test_downloader_handlers.py b/tests/test_downloader_handlers.py\nindex 8595e68c2..37a53643b 100644\n--- a/tests/test_downloader_handlers.py\n+++ b/tests/test_downloader_handlers.py\n@@ -669,9 +669,7 @@ class Https11CustomCiphers(unittest.TestCase):\n         crawler = get_crawler(\n             
settings_dict={\"DOWNLOADER_CLIENT_TLS_CIPHERS\": \"CAMELLIA256-SHA\"}\n         )\n-        self.download_handler = build_from_crawler(\n-            self.download_handler_cls, crawler\n-        )\n+        self.download_handler = build_from_crawler(self.download_handler_cls, crawler)\n         self.download_request = self.download_handler.download_request\n \n     @defer.inlineCallbacks\n@@ -1036,9 +1034,7 @@ class BaseFTPTestCase(unittest.TestCase):\n         self.port = reactor.listenTCP(0, self.factory, interface=\"127.0.0.1\")\n         self.portNum = self.port.getHost().port\n         crawler = get_crawler()\n-        self.download_handler = build_from_crawler(\n-            FTPDownloadHandler, crawler\n-        )\n+        self.download_handler = build_from_crawler(FTPDownloadHandler, crawler)\n         self.addCleanup(self.port.stopListening)\n \n     def tearDown(self):\n@@ -1182,9 +1178,7 @@ class AnonymousFTPTestCase(BaseFTPTestCase):\n         self.port = reactor.listenTCP(0, self.factory, interface=\"127.0.0.1\")\n         self.portNum = self.port.getHost().port\n         crawler = get_crawler()\n-        self.download_handler = build_from_crawler(\n-            FTPDownloadHandler, crawler\n-        )\n+        self.download_handler = build_from_crawler(FTPDownloadHandler, crawler)\n         self.addCleanup(self.port.stopListening)\n \n     def tearDown(self):\n@@ -1194,9 +1188,7 @@ class AnonymousFTPTestCase(BaseFTPTestCase):\n class DataURITestCase(unittest.TestCase):\n     def setUp(self):\n         crawler = get_crawler()\n-        self.download_handler = build_from_crawler(\n-            DataURIDownloadHandler, crawler\n-        )\n+        self.download_handler = build_from_crawler(DataURIDownloadHandler, crawler)\n         self.download_request = self.download_handler.download_request\n         self.spider = Spider(\"foo\")\n \ndiff --git a/tests/test_utils_misc/__init__.py b/tests/test_utils_misc/__init__.py\nindex ccf8022c9..0152a1110 100644\n--- a/tests/test_utils_misc/__init__.py\n+++ b/tests/test_utils_misc/__init__.py\n@@ -95,6 +95,7 @@ class UtilsMiscTestCase(unittest.TestCase):\n         self.assertEqual(\n             list(arg_to_iter(TestItem(name=\"john\"))), [TestItem(name=\"john\")]\n         )\n+\n     def test_build_from_crawler(self):\n         crawler = mock.MagicMock(spec_set=[\"settings\"])\n         args = (True, 100.0)\n@@ -122,7 +123,7 @@ class UtilsMiscTestCase(unittest.TestCase):\n         )\n         for specs in spec_sets:\n             m = mock.MagicMock(spec_set=specs)\n-            _test_with_crawler(m, settings, crawler)\n+            _test_with_crawler(m, crawler)\n             m.reset_mock()\n \n         # Check adoption of crawler\n@@ -130,7 +131,7 @@ class UtilsMiscTestCase(unittest.TestCase):\n         m.from_crawler.return_value = None\n         with self.assertRaises(TypeError):\n             build_from_crawler(m, crawler, *args, **kwargs)\n-    \n+\n     def test_build_from_settings(self):\n         settings = mock.MagicMock()\n         args = (True, 100.0)\ndiff --git a/tests/test_webclient.py b/tests/test_webclient.py\nindex 0042fe8f0..a69d9c1b0 100644\n--- a/tests/test_webclient.py\n+++ b/tests/test_webclient.py\n@@ -24,7 +24,7 @@ from scrapy.core.downloader import webclient as client\n from scrapy.core.downloader.contextfactory import ScrapyClientContextFactory\n from scrapy.http import Headers, Request\n from scrapy.settings import Settings\n-from scrapy.utils.misc import create_instance\n+from scrapy.utils.misc import 
build_from_settings\n from scrapy.utils.python import to_bytes, to_unicode\n from tests.mockserver import (\n     BrokenDownloadResource,\n@@ -470,8 +470,8 @@ class WebClientCustomCiphersSSLTestCase(WebClientSSLTestCase):\n     def testPayload(self):\n         s = \"0123456789\" * 10\n         settings = Settings({\"DOWNLOADER_CLIENT_TLS_CIPHERS\": self.custom_ciphers})\n-        client_context_factory = create_instance(\n-            ScrapyClientContextFactory, settings=settings, crawler=None\n+        client_context_factory = build_from_settings(\n+            ScrapyClientContextFactory, settings=settings\n         )\n         return getPage(\n             self.getURL(\"payload\"), body=s, contextFactory=client_context_factory\n@@ -482,8 +482,8 @@ class WebClientCustomCiphersSSLTestCase(WebClientSSLTestCase):\n         settings = Settings(\n             {\"DOWNLOADER_CLIENT_TLS_CIPHERS\": \"ECDHE-RSA-AES256-GCM-SHA384\"}\n         )\n-        client_context_factory = create_instance(\n-            ScrapyClientContextFactory, settings=settings, crawler=None\n+        client_context_factory = build_from_settings(\n+            ScrapyClientContextFactory, settings=settings\n         )\n         d = getPage(\n             self.getURL(\"payload\"), body=s, contextFactory=client_context_factory\n", "difficulty": 1, "changed_files": ["scrapy/addons.py", "scrapy/core/downloader/contextfactory.py", "scrapy/core/downloader/handlers/__init__.py", "scrapy/core/downloader/handlers/http10.py", "scrapy/core/engine.py", "scrapy/crawler.py", "scrapy/extensions/feedexport.py", "scrapy/utils/misc.py", "tests/test_addons.py", "tests/test_downloader_handlers.py", "tests/test_utils_misc/__init__.py", "tests/test_webclient.py"], "commit_link": "https://github.com/scrapy/scrapy/tree/b15d4bd9177149b88d1b0f719e7e6290df81fe9a"}
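For orientation, here is a short sketch of reading one record like the sample above and inspecting its main fields (`logs`, `diff`, `changed_files`); the local file name is hypothetical.

```python
import json

# Hypothetical local copy of the dataset as JSON lines; one record per line.
with open("data.jsonl", encoding="utf-8") as f:
    record = json.loads(f.readline())

# Repository and commit pair: the workflow fails at sha_fail and passes at sha_success.
print(f"{record['repo_owner']}/{record['repo_name']} @ {record['head_branch']}")
print("failing commit:", record["sha_fail"])
print("passing commit:", record["sha_success"])

# Each entry in `logs` pairs a CI step name with that step's raw log text.
for step in record["logs"]:
    print(step["step_name"], "->", len(step["log"]), "characters of log output")

# `diff` is the unified diff turning the failing commit into the passing one,
# and `changed_files` lists the paths it touches.
print("files changed:", ", ".join(record["changed_files"]))
```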