galtimur committed
Commit 11c83cb
Parent: 2f7fc4f

Upload 78 files


Filtered datapoints

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. data/python/028ad1e.json +1 -0
  2. data/python/03669a5.json +1 -0
  3. data/python/0b08b8e.json +0 -0
  4. data/python/0b93d2d.json +0 -0
  5. data/python/0f71221.json +1 -0
  6. data/python/102f918.json +1 -0
  7. data/python/1168b92.json +1 -0
  8. data/python/16a0c04.json +1 -0
  9. data/python/1afe2c9.json +1 -0
  10. data/python/2201be2.json +1 -0
  11. data/python/2a59b55.json +1 -0
  12. data/python/2ab9e84.json +1 -0
  13. data/python/2c06ffa.json +1 -0
  14. data/python/2f0605c.json +1 -0
  15. data/python/3dd8e44.json +1 -0
  16. data/python/3ed7a88.json +0 -0
  17. data/python/43dd59c.json +1 -0
  18. data/python/4410203.json +1 -0
  19. data/python/44b56e0.json +1 -0
  20. data/python/4d5898b.json +1 -0
  21. data/python/55d2e8d.json +1 -0
  22. data/python/55f8e66.json +0 -0
  23. data/python/5b9b7a0.json +1 -0
  24. data/python/5fea24b.json +1 -0
  25. data/python/616eb3b.json +1 -0
  26. data/python/63ae862.json +0 -0
  27. data/python/655e964.json +1 -0
  28. data/python/66e3eac.json +1 -0
  29. data/python/6819090.json +1 -0
  30. data/python/68ddb25.json +1 -0
  31. data/python/6cbb12e.json +1 -0
  32. data/python/76777e3.json +1 -0
  33. data/python/76e35ec.json +1 -0
  34. data/python/7aa2f79.json +0 -0
  35. data/python/7e3720f.json +1 -0
  36. data/python/7f35134.json +1 -0
  37. data/python/83b5e4b.json +1 -0
  38. data/python/897a5de.json +1 -0
  39. data/python/903a05c.json +0 -0
  40. data/python/9261583.json +1 -0
  41. data/python/92937f3.json +1 -0
  42. data/python/9981ca1.json +1 -0
  43. data/python/99ad8a3.json +1 -0
  44. data/python/9e1aa7b.json +0 -0
  45. data/python/aa8a42b.json +1 -0
  46. data/python/ac842d4.json +1 -0
  47. data/python/af9b76a.json +1 -0
  48. data/python/b15d4bd.json +1 -0
  49. data/python/b4cd344.json +1 -0
  50. data/python/b639adb.json +1 -0
data/python/028ad1e.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 17, "repo_owner": "huggingface", "repo_name": "accelerate", "head_branch": "AjayP13-patch-1", "workflow_name": "Quality Check", "workflow_filename": "quality.yml", "workflow_path": ".github/workflows/quality.yml", "contributor": "AjayP13", "sha_fail": "028ad1efee2c41691d78e5a4de90ebd6f8236cad", "sha_success": "9ff59024aee19e24948401eb8cc7057602592b49", "workflow": "name: Quality Check\n\non: [pull_request]\n\njobs:\n quality:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/[email protected]\n - name: Set up Python 3.8\n uses: actions/setup-python@v3\n with:\n python-version: 3.8\n - name: Install Python dependencies\n run: pip install -e .[quality]\n - name: Run Quality check\n run: make quality\n - name: Check if failure\n if: ${{ failure() }}\n run: |\n echo \"Quality check failed. Please ensure the right dependency versions are installed with 'pip install -e .[quality]' and rerun 'make style; make quality;'\" >> $GITHUB_STEP_SUMMARY\n\n", "logs": [{"step_name": "quality/5_Run Quality check.txt", "log": "##[group]Run make quality\n\u001b[36;1mmake quality\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\nblack --required-version 23 --check tests src examples benchmarks utils\nAll done! \u2728 \ud83c\udf70 \u2728\n118 files would be left unchanged.\nruff tests src examples benchmarks utils\nsrc/accelerate/utils/__init__.py:1:1: I001 [*] Import block is un-sorted or un-formatted\nsrc/accelerate/utils/modeling.py:15:1: I001 [*] Import block is un-sorted or un-formatted\nFound 2 errors.\n[*] 2 fixable with the `--fix` option.\nmake: *** [Makefile:16: quality] Error 1\n##[error]Process completed with exit code 2.\n"}], "diff": "diff --git a/src/accelerate/utils/__init__.py b/src/accelerate/utils/__init__.py\nindex ddf794a..7179afb 100644\n--- a/src/accelerate/utils/__init__.py\n+++ b/src/accelerate/utils/__init__.py\n@@ -59,7 +59,6 @@ from .imports import (\n is_comet_ml_available,\n is_cuda_available,\n is_datasets_available,\n- is_peft_available,\n is_deepspeed_available,\n is_dvclive_available,\n is_fp8_available,\n@@ -70,6 +69,7 @@ from .imports import (\n is_msamp_available,\n is_npu_available,\n is_pandas_available,\n+ is_peft_available,\n is_rich_available,\n is_sagemaker_available,\n is_tensorboard_available,\n@@ -81,7 +81,6 @@ from .imports import (\n is_xpu_available,\n )\n from .modeling import (\n- is_peft_model,\n calculate_maximum_sizes,\n check_device_map,\n check_tied_parameters_in_config,\n@@ -96,6 +95,7 @@ from .modeling import (\n get_mixed_precision_context_manager,\n id_tensor_storage,\n infer_auto_device_map,\n+ is_peft_model,\n load_checkpoint_in_model,\n load_offloaded_weights,\n load_state_dict,\ndiff --git a/src/accelerate/utils/modeling.py b/src/accelerate/utils/modeling.py\nindex 802b13c..03d3a39 100644\n--- a/src/accelerate/utils/modeling.py\n+++ b/src/accelerate/utils/modeling.py\n@@ -30,7 +30,7 @@ import torch.nn as nn\n from ..state import AcceleratorState\n from .constants import SAFE_WEIGHTS_NAME, WEIGHTS_NAME\n from .dataclasses import AutocastKwargs, CustomDtype, DistributedType\n-from .imports import is_mps_available, is_npu_available, is_xpu_available, is_peft_available\n+from .imports import is_mps_available, is_npu_available, is_peft_available, is_xpu_available\n from .offload import load_offloaded_weight, offload_weight, save_offload_index\n from .tqdm import is_tqdm_available, tqdm\n \n", 
"difficulty": 0, "changed_files": ["src/accelerate/utils/__init__.py", "src/accelerate/utils/modeling.py"], "commit_link": "https://github.com/huggingface/accelerate/tree/028ad1efee2c41691d78e5a4de90ebd6f8236cad"}
data/python/03669a5.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 32, "repo_owner": "mikel-brostrom", "repo_name": "yolo_tracking", "head_branch": "centroid-asso-support", "workflow_name": "CI CPU testing", "workflow_filename": "ci.yml", "workflow_path": ".github/workflows/ci.yml", "contributor": "mikel-brostrom", "sha_fail": "03669a5d72130c57575bedd657b82c601f08a982", "sha_success": "0948605abb9d6d962450f1bbf9a0b9c96c429b29", "workflow": "# name of the workflow, what it is doing (optional)\nname: CI CPU testing\n\n# events that trigger the workflow (required)\non:\n push:\n branches: [master, CIdebug]\n pull_request:\n # pull request where master is target\n branches: [master]\n\nenv:\n # Directory of PyPi package to be tested\n PACKAGE_DIR: boxmot\n # Minimum acceptable test coverage\n # Increase as you add more tests to increase coverage\n COVERAGE_FAIL_UNDER: 29\n\n# the workflow that gets triggerd\njobs:\n build:\n runs-on: ${{ matrix.os }}\n strategy:\n fail-fast: false\n matrix:\n os: [ubuntu-latest] # skip windows-latest for\n python-version: ['3.8', '3.9', '3.10']\n #model: ['yolov8n', 'yolo_nas_s', yolox_n] # yolo models to test\n #tracking-methods: ['deepocsort', 'ocsort', 'botsort', 'strongsort', 'bytetrack'] # tracking methods to test\n\n # Timeout: https://stackoverflow.com/a/59076067/4521646\n timeout-minutes: 50\n steps:\n\n - uses: actions/checkout@v4 # Check out the repository\n - uses: actions/setup-python@v5 # Prepare environment with python 3.9\n with:\n python-version: ${{ matrix.python-version }}\n cache: 'pip' # caching pip dependencies\n - name: Install requirements\n shell: bash # for Windows compatibility\n run: |\n python -m pip install --upgrade pip setuptools wheel\n pip install -e . pytest pytest-cov --extra-index-url https://download.pytorch.org/whl/cpu\n python --version\n pip --version\n pip list\n\n - name: Tests all tracking options\n shell: bash # for Windows compatibility\n env:\n IMG: ./assets/MOT17-mini/train/MOT17-05-FRCNN/img1/000001.jpg\n run: |\n # deepocsort fro all supported yolo models\n python examples/track.py --tracking-method deepocsort --source $IMG --imgsz 320\n python examples/track.py --yolo-model yolo_nas_s --tracking-method deepocsort --source $IMG --imgsz 320\n # python examples/track.py --yolo-model yolox_n --tracking-method deepocsort --source $IMG --imgsz 320\n\n # hybridsort\n python examples/track.py --tracking-method hybridsort --source $IMG --imgsz 320\n\n # botsort\n python examples/track.py --tracking-method botsort --source $IMG --imgsz 320\n\n # strongsort\n python examples/track.py --tracking-method strongsort --source $IMG --imgsz 320\n\n # ocsort\n python examples/track.py --tracking-method ocsort --source $IMG --imgsz 320\n\n # bytetrack\n python examples/track.py --tracking-method bytetrack --source $IMG --imgsz 320\n\n - name: Pytest tests # after tracking options as this does not download models\n shell: bash # for Windows compatibility\n run: |\n\n # needed in TFLite export\n wget https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\n tar -zxvf flatc.tar.gz\n sudo chmod +x flatc\n sudo mv flatc /usr/bin/\n\n pytest --cov=$PACKAGE_DIR --cov-report=html -v tests\n coverage report --fail-under=$COVERAGE_FAIL_UNDER\n\n - name: Tests exported reid models\n env:\n IMG: ./assets/MOT17-mini/train/MOT17-05-FRCNN/img1/000001.jpg\n shell: bash # for Windows compatibility\n run: |\n\n # test exported reid model\n python examples/track.py --reid-model examples/weights/osnet_x0_25_msmt17.torchscript --source $IMG --imgsz 320\n python 
examples/track.py --reid-model examples/weights/osnet_x0_25_msmt17.onnx --source $IMG --imgsz 320\n #python examples/track.py --reid-model examples/weights/osnet_x0_25_msmt17_saved_model/osnet_x0_25_msmt17_float16.tflite --source $IMG --imgsz 320\n python examples/track.py --reid-model examples/weights/osnet_x0_25_msmt17_openvino_model --source $IMG --imgsz 320\n\n - name: Test tracking with seg models\n env:\n IMG: ./assets/MOT17-mini/train/MOT17-05-FRCNN/img1/000001.jpg\n shell: bash # for Windows compatibility\n run: |\n # tracking with SEG models\n python examples/track.py --tracking-method deepocsort --yolo-model yolov8n-seg.pt --source $IMG\n\n - name: Test tracking with pose models\n env:\n IMG: ./assets/MOT17-mini/train/MOT17-05-FRCNN/img1/000001.jpg\n shell: bash # for Windows compatibility\n run: |\n # tracking with POSE models\n python3 examples/track.py --yolo-model weights/yolov8n.pt --source $IMG --imgsz 320\n\n - name: Test validation on MOT17 subset\n shell: bash # for Windows compatibility\n run: |\n # validation on a few MOT17 imges\n python examples/val.py --tracking-method deepocsort --yolo-model yolov8n.pt --benchmark MOT17-mini --imgsz 320 --conf 0.2\n\n - name: Test evolution on MOT17 subset\n shell: bash # for Windows compatibility\n run: |\n # evolve a for a single set of parameters\n python examples/evolve.py --objectives HOTA,MOTA,IDF1 --benchmark MOT17-mini --n-trials 1 --imgsz 320 --conf 0.2\n", "logs": [{"step_name": "build (ubuntu-latest, 3.8)/6_Pytest tests.txt", "log": "##[group]Run # needed in TFLite export\n\u001b[36;1m\u001b[0m\n\u001b[36;1m# needed in TFLite export\u001b[0m\n\u001b[36;1mwget https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\u001b[0m\n\u001b[36;1mtar -zxvf flatc.tar.gz\u001b[0m\n\u001b[36;1msudo chmod +x flatc\u001b[0m\n\u001b[36;1msudo mv flatc /usr/bin/\u001b[0m\n\u001b[36;1m\u001b[0m\n\u001b[36;1mpytest --cov=$PACKAGE_DIR --cov-report=html -v tests\u001b[0m\n\u001b[36;1mcoverage report --fail-under=$COVERAGE_FAIL_UNDER\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n PACKAGE_DIR: boxmot\n COVERAGE_FAIL_UNDER: 29\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\n--2024-01-12 20:11:25-- https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\nResolving github.com (github.com)... 140.82.112.3\nConnecting to github.com (github.com)|140.82.112.3|:443... connected.\nHTTP request sent, awaiting response... 
302 Found\nLocation: https://objects.githubusercontent.com/github-production-release-asset-2e65be/541831874/29499355-44ab-4fb6-86c8-582f4bad68a3?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20240112%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240112T201125Z&X-Amz-Expires=300&X-Amz-Signature=3bfcceeafbe583d085cec55f86a8526c6e9f64b963ff80c4ac3797875e856d28&X-Amz-SignedHeaders=host&actor_id=0&key_id=0&repo_id=541831874&response-content-disposition=attachment%3B%20filename%3Dflatc.tar.gz&response-content-type=application%2Foctet-stream [following]\n--2024-01-12 20:11:25-- https://objects.githubusercontent.com/github-production-release-asset-2e65be/541831874/29499355-44ab-4fb6-86c8-582f4bad68a3?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20240112%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240112T201125Z&X-Amz-Expires=300&X-Amz-Signature=3bfcceeafbe583d085cec55f86a8526c6e9f64b963ff80c4ac3797875e856d28&X-Amz-SignedHeaders=host&actor_id=0&key_id=0&repo_id=541831874&response-content-disposition=attachment%3B%20filename%3Dflatc.tar.gz&response-content-type=application%2Foctet-stream\nResolving objects.githubusercontent.com (objects.githubusercontent.com)... 185.199.110.133, 185.199.108.133, 185.199.109.133, ...\nConnecting to objects.githubusercontent.com (objects.githubusercontent.com)|185.199.110.133|:443... connected.\nHTTP request sent, awaiting response... 200 OK\nLength: 1382707 (1.3M) [application/octet-stream]\nSaving to: \u2018flatc.tar.gz\u2019\n\n 0K .......... .......... .......... .......... .......... 3% 28.1M 0s\n 50K .......... .......... .......... .......... .......... 7% 36.5M 0s\n 100K .......... .......... .......... .......... .......... 11% 143M 0s\n 150K .......... .......... .......... .......... .......... 14% 42.2M 0s\n 200K .......... .......... .......... .......... .......... 18% 131M 0s\n 250K .......... .......... .......... .......... .......... 22% 167M 0s\n 300K .......... .......... .......... .......... .......... 25% 200M 0s\n 350K .......... .......... .......... .......... .......... 29% 74.0M 0s\n 400K .......... .......... .......... .......... .......... 33% 187M 0s\n 450K .......... .......... .......... .......... .......... 37% 172M 0s\n 500K .......... .......... .......... .......... .......... 40% 132M 0s\n 550K .......... .......... .......... .......... .......... 44% 233M 0s\n 600K .......... .......... .......... .......... .......... 48% 155M 0s\n 650K .......... .......... .......... .......... .......... 51% 208M 0s\n 700K .......... .......... .......... .......... .......... 55% 136M 0s\n 750K .......... .......... .......... .......... .......... 59% 223M 0s\n 800K .......... .......... .......... .......... .......... 62% 136M 0s\n 850K .......... .......... .......... .......... .......... 66% 140M 0s\n 900K .......... .......... .......... .......... .......... 70% 238M 0s\n 950K .......... .......... .......... .......... .......... 74% 241M 0s\n 1000K .......... .......... .......... .......... .......... 77% 242M 0s\n 1050K .......... .......... .......... .......... .......... 81% 215M 0s\n 1100K .......... .......... .......... .......... .......... 85% 238M 0s\n 1150K .......... .......... .......... .......... .......... 88% 240M 0s\n 1200K .......... .......... .......... .......... .......... 92% 236M 0s\n 1250K .......... .......... .......... .......... .......... 96% 198M 0s\n 1300K .......... .......... .......... .......... .......... 
99% 251M 0s\n 1350K 100% 572G=0.01s\n\n2024-01-12 20:11:25 (120 MB/s) - \u2018flatc.tar.gz\u2019 saved [1382707/1382707]\n\nflatc\n============================= test session starts ==============================\nplatform linux -- Python 3.8.18, pytest-7.4.4, pluggy-1.3.0 -- /opt/hostedtoolcache/Python/3.8.18/x64/bin/python\ncachedir: .pytest_cache\nrootdir: /home/runner/work/yolo_tracking/yolo_tracking\nplugins: hydra-core-1.3.2, cov-4.1.0\ncollecting ... collected 14 items\n\ntests/test_exports.py::test_export_torchscript PASSED [ 7%]\ntests/test_exports.py::test_export_onnx PASSED [ 14%]\ntests/test_exports.py::test_export_openvino PASSED [ 21%]\ntests/test_python.py::test_strongsort_instantiation PASSED [ 28%]\ntests/test_python.py::test_botsort_instantiation PASSED [ 35%]\ntests/test_python.py::test_deepocsort_instantiation PASSED [ 42%]\ntests/test_python.py::test_ocsort_instantiation PASSED [ 50%]\ntests/test_python.py::test_bytetrack_instantiation PASSED [ 57%]\ntests/test_python.py::test_deepocsort_output FAILED [ 64%]\ntests/test_python.py::test_ocsort_output PASSED [ 71%]\ntests/test_python.py::test_botsort_output PASSED [ 78%]\ntests/test_python.py::test_bytetrack_output PASSED [ 85%]\ntests/test_python.py::test_strongsort_output PASSED [ 92%]\ntests/test_python.py::test_gsi PASSED [100%]\n\n=================================== FAILURES ===================================\n____________________________ test_deepocsort_output ____________________________\n\n def test_deepocsort_output():\n tracker_conf = get_tracker_config('deepocsort')\n tracker = create_tracker(\n tracker_type='deepocsort',\n tracker_config=tracker_conf,\n reid_weights=WEIGHTS / 'mobilenetv2_x1_4_dukemtmcreid.pt',\n device='cpu',\n half=False,\n per_class=False\n )\n rgb = np.random.randint(255, size=(640, 640, 3), dtype=np.uint8)\n det = np.array([[144, 212, 578, 480, 0.82, 0],\n [425, 281, 576, 472, 0.56, 65]])\n output = tracker.update(det, rgb)\n # Works since frame count is less than min hits (1 <= 2)\n assert output.shape == (2, 8) # two inputs should give two outputs\n output = np.flip(np.delete(output, [4, 7], axis=1), axis=0)\n assert_allclose(det, output, atol=1, rtol=7e-3, verbose=True)\n \n # Instantiate new tracker and ensure minimum number of hits works\n tracker = create_tracker(\n tracker_type='deepocsort',\n tracker_config=tracker_conf,\n reid_weights=WEIGHTS / 'mobilenetv2_x1_4_dukemtmcreid.pt',\n device='cpu',\n half=False,\n per_class=False\n )\n tracker.min_hits = 2\n output = tracker.update(np.empty((0, 6)), rgb)\n assert output.size == 0\n output = tracker.update(np.empty((0, 6)), rgb)\n assert output.size == 0\n output = tracker.update(det, rgb)\n assert output.size == 0\n> output = tracker.update(det, rgb)\n\ntests/test_python.py:86: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \nboxmot/trackers/deepocsort/deep_ocsort.py:434: in update\n matched, unmatched_dets, unmatched_trks = associate(\nboxmot/utils/association.py:146: in associate\n iou_matrix = run_asso_func(asso_func, detections, trackers, w, h)\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\nfunc = <function giou_batch at 0x7f1dba152ca0>\nargs = (array([[1.44e+02, 2.12e+02, 5.78e+02, 4.80e+02, 8.20e-01],\n [4.25e+02, 2.81e+02, 5.76e+02, 4.72e+02, 5.60e-01]]), array([[144., 212., 578., 480., 0.],\n [425., 281., 576., 472., 0.]]), 640, 640)\n\n def run_asso_func(func, *args):\n \"\"\"\n Wrapper function that checks the inputs to the association functions\n 
and then call either one of the iou association functions or centroid.\n \n Parameters:\n func: The batch function to call (either *iou*_batch or centroid_batch).\n *args: Variable length argument list, containing either bounding boxes and optionally size parameters.\n \"\"\"\n if func not in [iou_batch, giou_batch, diou_batch, ciou_batch, centroid_batch]:\n raise ValueError(\"Invalid function specified. Must be either '(g,d,c, )iou_batch' or 'centroid_batch'.\")\n \n if func is (iou_batch or giou_batch or diou_batch or ciou_batch):\n if len(args) != 4 or not all(isinstance(arg, (list, np.ndarray)) for arg in args[0:2]):\n raise ValueError(\"Invalid arguments for iou_batch. Expected two bounding boxes.\")\n return func(*args)\n elif func is centroid_batch:\n if len(args) != 4 or not all(isinstance(arg, (list, np.ndarray)) for arg in args[:2]) or not all(isinstance(arg, (int)) for arg in args[2:]):\n raise ValueError(\"Invalid arguments for centroid_batch. Expected two bounding boxes and two size parameters.\")\n return func(*args)\n else:\n> raise ValueError(\"No such association method\")\nE ValueError: No such association method\n\nboxmot/utils/iou.py:212: ValueError\n----------------------------- Captured stderr call -----------------------------\nDownloading...\nFrom: https://drive.google.com/uc?id=12uD5FeVqLg9-AFDju2L7SQxjmPb4zpBN\nTo: /home/runner/work/yolo_tracking/yolo_tracking/examples/weights/mobilenetv2_x1_4_dukemtmcreid.pt\n\n 0%| | 0.00/22.4M [00:00<?, ?B/s]\n 9%|\u2589 | 2.10M/22.4M [00:00<00:01, 19.5MB/s]\n 40%|\u2588\u2588\u2588\u2589 | 8.91M/22.4M [00:00<00:00, 43.6MB/s]\n 82%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 18.4M/22.4M [00:00<00:00, 65.1MB/s]\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 22.4M/22.4M [00:00<00:00, 64.2MB/s]\n\u001b[32m2024-01-12 20:11:55.585\u001b[0m | \u001b[32m\u001b[1mSUCCESS \u001b[0m | \u001b[36mboxmot.appearance.reid_model_factory\u001b[0m:\u001b[36mload_pretrained_weights\u001b[0m:\u001b[36m207\u001b[0m - \u001b[32m\u001b[1mSuccessfully loaded pretrained weights from \"/home/runner/work/yolo_tracking/yolo_tracking/examples/weights/mobilenetv2_x1_4_dukemtmcreid.pt\"\u001b[0m\n\u001b[32m2024-01-12 20:11:55.738\u001b[0m | \u001b[32m\u001b[1mSUCCESS \u001b[0m | \u001b[36mboxmot.appearance.reid_model_factory\u001b[0m:\u001b[36mload_pretrained_weights\u001b[0m:\u001b[36m207\u001b[0m - \u001b[32m\u001b[1mSuccessfully loaded pretrained weights from \"/home/runner/work/yolo_tracking/yolo_tracking/examples/weights/mobilenetv2_x1_4_dukemtmcreid.pt\"\u001b[0m\n=============================== warnings summary ===============================\nboxmot/utils/checks.py:5\n /home/runner/work/yolo_tracking/yolo_tracking/boxmot/utils/checks.py:5: DeprecationWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html\n import pkg_resources\n\n../../../../../opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('mpl_toolkits')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. 
See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\n../../../../../opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('google')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\n../../../../../opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('sphinxcontrib')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\ntests/test_exports.py::test_export_onnx\n /home/runner/work/yolo_tracking/yolo_tracking/boxmot/appearance/backbones/osnet.py:389: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs!\n if return_featuremaps:\n\ntests/test_exports.py::test_export_openvino\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/defusedxml/__init__.py:30: DeprecationWarning: defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead.\n from . 
import cElementTree\n\n-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html\n\n---------- coverage: platform linux, python 3.8.18-final-0 -----------\nCoverage HTML written to dir htmlcov\n\n=========================== short test summary info ============================\nFAILED tests/test_python.py::test_deepocsort_output - ValueError: No such association method\n================== 1 failed, 13 passed, 9 warnings in 31.85s ===================\n##[error]Process completed with exit code 1.\n"}, {"step_name": "build (ubuntu-latest, 3.9)/6_Pytest tests.txt", "log": "##[group]Run # needed in TFLite export\n\u001b[36;1m\u001b[0m\n\u001b[36;1m# needed in TFLite export\u001b[0m\n\u001b[36;1mwget https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\u001b[0m\n\u001b[36;1mtar -zxvf flatc.tar.gz\u001b[0m\n\u001b[36;1msudo chmod +x flatc\u001b[0m\n\u001b[36;1msudo mv flatc /usr/bin/\u001b[0m\n\u001b[36;1m\u001b[0m\n\u001b[36;1mpytest --cov=$PACKAGE_DIR --cov-report=html -v tests\u001b[0m\n\u001b[36;1mcoverage report --fail-under=$COVERAGE_FAIL_UNDER\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n PACKAGE_DIR: boxmot\n COVERAGE_FAIL_UNDER: 29\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\n--2024-01-12 20:11:58-- https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\nResolving github.com (github.com)... 192.30.255.113\nConnecting to github.com (github.com)|192.30.255.113|:443... connected.\nHTTP request sent, awaiting response... 302 Found\nLocation: https://objects.githubusercontent.com/github-production-release-asset-2e65be/541831874/29499355-44ab-4fb6-86c8-582f4bad68a3?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20240112%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240112T201159Z&X-Amz-Expires=300&X-Amz-Signature=709497883dd2c2a9471410daf12c63e704acb6b779fb216aa16d512cc5577229&X-Amz-SignedHeaders=host&actor_id=0&key_id=0&repo_id=541831874&response-content-disposition=attachment%3B%20filename%3Dflatc.tar.gz&response-content-type=application%2Foctet-stream [following]\n--2024-01-12 20:11:59-- https://objects.githubusercontent.com/github-production-release-asset-2e65be/541831874/29499355-44ab-4fb6-86c8-582f4bad68a3?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20240112%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240112T201159Z&X-Amz-Expires=300&X-Amz-Signature=709497883dd2c2a9471410daf12c63e704acb6b779fb216aa16d512cc5577229&X-Amz-SignedHeaders=host&actor_id=0&key_id=0&repo_id=541831874&response-content-disposition=attachment%3B%20filename%3Dflatc.tar.gz&response-content-type=application%2Foctet-stream\nResolving objects.githubusercontent.com (objects.githubusercontent.com)... 185.199.111.133, 185.199.108.133, 185.199.109.133, ...\nConnecting to objects.githubusercontent.com (objects.githubusercontent.com)|185.199.111.133|:443... connected.\nHTTP request sent, awaiting response... 200 OK\nLength: 1382707 (1.3M) [application/octet-stream]\nSaving to: \u2018flatc.tar.gz\u2019\n\n 0K .......... .......... .......... .......... .......... 3% 10.7M 0s\n 50K .......... .......... .......... .......... .......... 
7% 44.1M 0s\n 100K .......... .......... .......... .......... .......... 11% 89.2M 0s\n 150K .......... .......... .......... .......... .......... 14% 62.8M 0s\n 200K .......... .......... .......... .......... .......... 18% 108M 0s\n 250K .......... .......... .......... .......... .......... 22% 163M 0s\n 300K .......... .......... .......... .......... .......... 25% 186M 0s\n 350K .......... .......... .......... .......... .......... 29% 202M 0s\n 400K .......... .......... .......... .......... .......... 33% 108M 0s\n 450K .......... .......... .......... .......... .......... 37% 199M 0s\n 500K .......... .......... .......... .......... .......... 40% 190M 0s\n 550K .......... .......... .......... .......... .......... 44% 198M 0s\n 600K .......... .......... .......... .......... .......... 48% 171M 0s\n 650K .......... .......... .......... .......... .......... 51% 212M 0s\n 700K .......... .......... .......... .......... .......... 55% 170M 0s\n 750K .......... .......... .......... .......... .......... 59% 185M 0s\n 800K .......... .......... .......... .......... .......... 62% 199M 0s\n 850K .......... .......... .......... .......... .......... 66% 259M 0s\n 900K .......... .......... .......... .......... .......... 70% 191M 0s\n 950K .......... .......... .......... .......... .......... 74% 205M 0s\n 1000K .......... .......... .......... .......... .......... 77% 193M 0s\n 1050K .......... .......... .......... .......... .......... 81% 267M 0s\n 1100K .......... .......... .......... .......... .......... 85% 271M 0s\n 1150K .......... .......... .......... .......... .......... 88% 239M 0s\n 1200K .......... .......... .......... .......... .......... 92% 268M 0s\n 1250K .......... .......... .......... .......... .......... 96% 260M 0s\n 1300K .......... .......... .......... .......... .......... 99% 269M 0s\n 1350K 100% 572G=0.01s\n\n2024-01-12 20:11:59 (103 MB/s) - \u2018flatc.tar.gz\u2019 saved [1382707/1382707]\n\nflatc\n============================= test session starts ==============================\nplatform linux -- Python 3.9.18, pytest-7.4.4, pluggy-1.3.0 -- /opt/hostedtoolcache/Python/3.9.18/x64/bin/python\ncachedir: .pytest_cache\nrootdir: /home/runner/work/yolo_tracking/yolo_tracking\nplugins: hydra-core-1.3.2, cov-4.1.0\ncollecting ... 
collected 14 items\n\ntests/test_exports.py::test_export_torchscript PASSED [ 7%]\ntests/test_exports.py::test_export_onnx PASSED [ 14%]\ntests/test_exports.py::test_export_openvino PASSED [ 21%]\ntests/test_python.py::test_strongsort_instantiation PASSED [ 28%]\ntests/test_python.py::test_botsort_instantiation PASSED [ 35%]\ntests/test_python.py::test_deepocsort_instantiation PASSED [ 42%]\ntests/test_python.py::test_ocsort_instantiation PASSED [ 50%]\ntests/test_python.py::test_bytetrack_instantiation PASSED [ 57%]\ntests/test_python.py::test_deepocsort_output FAILED [ 64%]\ntests/test_python.py::test_ocsort_output PASSED [ 71%]\ntests/test_python.py::test_botsort_output PASSED [ 78%]\ntests/test_python.py::test_bytetrack_output PASSED [ 85%]\ntests/test_python.py::test_strongsort_output PASSED [ 92%]\ntests/test_python.py::test_gsi PASSED [100%]\n\n=================================== FAILURES ===================================\n____________________________ test_deepocsort_output ____________________________\n\n def test_deepocsort_output():\n tracker_conf = get_tracker_config('deepocsort')\n tracker = create_tracker(\n tracker_type='deepocsort',\n tracker_config=tracker_conf,\n reid_weights=WEIGHTS / 'mobilenetv2_x1_4_dukemtmcreid.pt',\n device='cpu',\n half=False,\n per_class=False\n )\n rgb = np.random.randint(255, size=(640, 640, 3), dtype=np.uint8)\n det = np.array([[144, 212, 578, 480, 0.82, 0],\n [425, 281, 576, 472, 0.56, 65]])\n output = tracker.update(det, rgb)\n # Works since frame count is less than min hits (1 <= 2)\n assert output.shape == (2, 8) # two inputs should give two outputs\n output = np.flip(np.delete(output, [4, 7], axis=1), axis=0)\n assert_allclose(det, output, atol=1, rtol=7e-3, verbose=True)\n \n # Instantiate new tracker and ensure minimum number of hits works\n tracker = create_tracker(\n tracker_type='deepocsort',\n tracker_config=tracker_conf,\n reid_weights=WEIGHTS / 'mobilenetv2_x1_4_dukemtmcreid.pt',\n device='cpu',\n half=False,\n per_class=False\n )\n tracker.min_hits = 2\n output = tracker.update(np.empty((0, 6)), rgb)\n assert output.size == 0\n output = tracker.update(np.empty((0, 6)), rgb)\n assert output.size == 0\n output = tracker.update(det, rgb)\n assert output.size == 0\n> output = tracker.update(det, rgb)\n\ntests/test_python.py:86: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \nboxmot/trackers/deepocsort/deep_ocsort.py:434: in update\n matched, unmatched_dets, unmatched_trks = associate(\nboxmot/utils/association.py:146: in associate\n iou_matrix = run_asso_func(asso_func, detections, trackers, w, h)\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\nfunc = <function giou_batch at 0x7ff526351e50>\nargs = (array([[1.44e+02, 2.12e+02, 5.78e+02, 4.80e+02, 8.20e-01],\n [4.25e+02, 2.81e+02, 5.76e+02, 4.72e+02, 5.60e-01]]), array([[144., 212., 578., 480., 0.],\n [425., 281., 576., 472., 0.]]), 640, 640)\n\n def run_asso_func(func, *args):\n \"\"\"\n Wrapper function that checks the inputs to the association functions\n and then call either one of the iou association functions or centroid.\n \n Parameters:\n func: The batch function to call (either *iou*_batch or centroid_batch).\n *args: Variable length argument list, containing either bounding boxes and optionally size parameters.\n \"\"\"\n if func not in [iou_batch, giou_batch, diou_batch, ciou_batch, centroid_batch]:\n raise ValueError(\"Invalid function specified. 
Must be either '(g,d,c, )iou_batch' or 'centroid_batch'.\")\n \n if func is (iou_batch or giou_batch or diou_batch or ciou_batch):\n if len(args) != 4 or not all(isinstance(arg, (list, np.ndarray)) for arg in args[0:2]):\n raise ValueError(\"Invalid arguments for iou_batch. Expected two bounding boxes.\")\n return func(*args)\n elif func is centroid_batch:\n if len(args) != 4 or not all(isinstance(arg, (list, np.ndarray)) for arg in args[:2]) or not all(isinstance(arg, (int)) for arg in args[2:]):\n raise ValueError(\"Invalid arguments for centroid_batch. Expected two bounding boxes and two size parameters.\")\n return func(*args)\n else:\n> raise ValueError(\"No such association method\")\nE ValueError: No such association method\n\nboxmot/utils/iou.py:212: ValueError\n----------------------------- Captured stderr call -----------------------------\nDownloading...\nFrom: https://drive.google.com/uc?id=12uD5FeVqLg9-AFDju2L7SQxjmPb4zpBN\nTo: /home/runner/work/yolo_tracking/yolo_tracking/examples/weights/mobilenetv2_x1_4_dukemtmcreid.pt\n\n 0%| | 0.00/22.4M [00:00<?, ?B/s]\n 5%|\u258d | 1.05M/22.4M [00:00<00:02, 9.56MB/s]\n 21%|\u2588\u2588 | 4.72M/22.4M [00:00<00:00, 21.4MB/s]\n 54%|\u2588\u2588\u2588\u2588\u2588\u258e | 12.1M/22.4M [00:00<00:00, 32.0MB/s]\n 77%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 17.3M/22.4M [00:00<00:00, 32.2MB/s]\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 22.4M/22.4M [00:00<00:00, 37.0MB/s]\n\u001b[32m2024-01-12 20:12:31.441\u001b[0m | \u001b[32m\u001b[1mSUCCESS \u001b[0m | \u001b[36mboxmot.appearance.reid_model_factory\u001b[0m:\u001b[36mload_pretrained_weights\u001b[0m:\u001b[36m207\u001b[0m - \u001b[32m\u001b[1mSuccessfully loaded pretrained weights from \"/home/runner/work/yolo_tracking/yolo_tracking/examples/weights/mobilenetv2_x1_4_dukemtmcreid.pt\"\u001b[0m\n\u001b[32m2024-01-12 20:12:31.599\u001b[0m | \u001b[32m\u001b[1mSUCCESS \u001b[0m | \u001b[36mboxmot.appearance.reid_model_factory\u001b[0m:\u001b[36mload_pretrained_weights\u001b[0m:\u001b[36m207\u001b[0m - \u001b[32m\u001b[1mSuccessfully loaded pretrained weights from \"/home/runner/work/yolo_tracking/yolo_tracking/examples/weights/mobilenetv2_x1_4_dukemtmcreid.pt\"\u001b[0m\n=============================== warnings summary ===============================\nboxmot/utils/checks.py:5\n /home/runner/work/yolo_tracking/yolo_tracking/boxmot/utils/checks.py:5: DeprecationWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html\n import pkg_resources\n\n../../../../../opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('google')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. 
See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\n../../../../../opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('sphinxcontrib')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\ntests/test_exports.py::test_export_onnx\n /home/runner/work/yolo_tracking/yolo_tracking/boxmot/appearance/backbones/osnet.py:389: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs!\n if return_featuremaps:\n\ntests/test_exports.py::test_export_openvino\n /opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/defusedxml/__init__.py:30: DeprecationWarning: defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead.\n from . import cElementTree\n\n-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html\n\n---------- coverage: platform linux, python 3.9.18-final-0 -----------\nCoverage HTML written to dir htmlcov\n\n=========================== short test summary info ============================\nFAILED tests/test_python.py::test_deepocsort_output - ValueError: No such association method\n================== 1 failed, 13 passed, 8 warnings in 33.86s ===================\n##[error]Process completed with exit code 1.\n"}, {"step_name": "build (ubuntu-latest, 3.10)/6_Pytest tests.txt", "log": "##[group]Run # needed in TFLite export\n\u001b[36;1m\u001b[0m\n\u001b[36;1m# needed in TFLite export\u001b[0m\n\u001b[36;1mwget https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\u001b[0m\n\u001b[36;1mtar -zxvf flatc.tar.gz\u001b[0m\n\u001b[36;1msudo chmod +x flatc\u001b[0m\n\u001b[36;1msudo mv flatc /usr/bin/\u001b[0m\n\u001b[36;1m\u001b[0m\n\u001b[36;1mpytest --cov=$PACKAGE_DIR --cov-report=html -v tests\u001b[0m\n\u001b[36;1mcoverage report --fail-under=$COVERAGE_FAIL_UNDER\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n PACKAGE_DIR: boxmot\n COVERAGE_FAIL_UNDER: 29\n pythonLocation: /opt/hostedtoolcache/Python/3.10.13/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib\n##[endgroup]\n--2024-01-12 20:12:02-- https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\nResolving github.com (github.com)... 140.82.113.3\nConnecting to github.com (github.com)|140.82.113.3|:443... connected.\nHTTP request sent, awaiting response... 
302 Found\nLocation: https://objects.githubusercontent.com/github-production-release-asset-2e65be/541831874/29499355-44ab-4fb6-86c8-582f4bad68a3?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20240112%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240112T201203Z&X-Amz-Expires=300&X-Amz-Signature=f339441879645b502b6bf0bbc8e04deb0dcdf54ed1dcd223e8a7e04adef107ad&X-Amz-SignedHeaders=host&actor_id=0&key_id=0&repo_id=541831874&response-content-disposition=attachment%3B%20filename%3Dflatc.tar.gz&response-content-type=application%2Foctet-stream [following]\n--2024-01-12 20:12:03-- https://objects.githubusercontent.com/github-production-release-asset-2e65be/541831874/29499355-44ab-4fb6-86c8-582f4bad68a3?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20240112%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240112T201203Z&X-Amz-Expires=300&X-Amz-Signature=f339441879645b502b6bf0bbc8e04deb0dcdf54ed1dcd223e8a7e04adef107ad&X-Amz-SignedHeaders=host&actor_id=0&key_id=0&repo_id=541831874&response-content-disposition=attachment%3B%20filename%3Dflatc.tar.gz&response-content-type=application%2Foctet-stream\nResolving objects.githubusercontent.com (objects.githubusercontent.com)... 185.199.108.133, 185.199.110.133, 185.199.109.133, ...\nConnecting to objects.githubusercontent.com (objects.githubusercontent.com)|185.199.108.133|:443... connected.\nHTTP request sent, awaiting response... 200 OK\nLength: 1382707 (1.3M) [application/octet-stream]\nSaving to: \u2018flatc.tar.gz\u2019\n\n 0K .......... .......... .......... .......... .......... 3% 5.77M 0s\n 50K .......... .......... .......... .......... .......... 7% 6.18M 0s\n 100K .......... .......... .......... .......... .......... 11% 30.6M 0s\n 150K .......... .......... .......... .......... .......... 14% 39.6M 0s\n 200K .......... .......... .......... .......... .......... 18% 9.54M 0s\n 250K .......... .......... .......... .......... .......... 22% 49.5M 0s\n 300K .......... .......... .......... .......... .......... 25% 65.1M 0s\n 350K .......... .......... .......... .......... .......... 29% 45.1M 0s\n 400K .......... .......... .......... .......... .......... 33% 47.7M 0s\n 450K .......... .......... .......... .......... .......... 37% 86.6M 0s\n 500K .......... .......... .......... .......... .......... 40% 13.9M 0s\n 550K .......... .......... .......... .......... .......... 44% 44.2M 0s\n 600K .......... .......... .......... .......... .......... 48% 74.1M 0s\n 650K .......... .......... .......... .......... .......... 51% 127M 0s\n 700K .......... .......... .......... .......... .......... 55% 88.8M 0s\n 750K .......... .......... .......... .......... .......... 59% 126M 0s\n 800K .......... .......... .......... .......... .......... 62% 178M 0s\n 850K .......... .......... .......... .......... .......... 66% 77.3M 0s\n 900K .......... .......... .......... .......... .......... 70% 209M 0s\n 950K .......... .......... .......... .......... .......... 74% 180M 0s\n 1000K .......... .......... .......... .......... .......... 77% 159M 0s\n 1050K .......... .......... .......... .......... .......... 81% 14.6M 0s\n 1100K .......... .......... .......... .......... .......... 85% 52.9M 0s\n 1150K .......... .......... .......... .......... .......... 88% 186M 0s\n 1200K .......... .......... .......... .......... .......... 92% 127M 0s\n 1250K .......... .......... .......... .......... .......... 96% 210M 0s\n 1300K .......... .......... .......... .......... .......... 
99% 216M 0s\n 1350K 100% 8.87M=0.04s\n\n2024-01-12 20:12:03 (31.1 MB/s) - \u2018flatc.tar.gz\u2019 saved [1382707/1382707]\n\nflatc\n============================= test session starts ==============================\nplatform linux -- Python 3.10.13, pytest-7.4.4, pluggy-1.3.0 -- /opt/hostedtoolcache/Python/3.10.13/x64/bin/python\ncachedir: .pytest_cache\nrootdir: /home/runner/work/yolo_tracking/yolo_tracking\nplugins: hydra-core-1.3.2, cov-4.1.0\ncollecting ... collected 14 items\n\ntests/test_exports.py::test_export_torchscript PASSED [ 7%]\ntests/test_exports.py::test_export_onnx PASSED [ 14%]\ntests/test_exports.py::test_export_openvino PASSED [ 21%]\ntests/test_python.py::test_strongsort_instantiation PASSED [ 28%]\ntests/test_python.py::test_botsort_instantiation PASSED [ 35%]\ntests/test_python.py::test_deepocsort_instantiation PASSED [ 42%]\ntests/test_python.py::test_ocsort_instantiation PASSED [ 50%]\ntests/test_python.py::test_bytetrack_instantiation PASSED [ 57%]\ntests/test_python.py::test_deepocsort_output FAILED [ 64%]\ntests/test_python.py::test_ocsort_output PASSED [ 71%]\ntests/test_python.py::test_botsort_output PASSED [ 78%]\ntests/test_python.py::test_bytetrack_output PASSED [ 85%]\ntests/test_python.py::test_strongsort_output PASSED [ 92%]\ntests/test_python.py::test_gsi PASSED [100%]\n\n=================================== FAILURES ===================================\n____________________________ test_deepocsort_output ____________________________\n\n def test_deepocsort_output():\n tracker_conf = get_tracker_config('deepocsort')\n tracker = create_tracker(\n tracker_type='deepocsort',\n tracker_config=tracker_conf,\n reid_weights=WEIGHTS / 'mobilenetv2_x1_4_dukemtmcreid.pt',\n device='cpu',\n half=False,\n per_class=False\n )\n rgb = np.random.randint(255, size=(640, 640, 3), dtype=np.uint8)\n det = np.array([[144, 212, 578, 480, 0.82, 0],\n [425, 281, 576, 472, 0.56, 65]])\n output = tracker.update(det, rgb)\n # Works since frame count is less than min hits (1 <= 2)\n assert output.shape == (2, 8) # two inputs should give two outputs\n output = np.flip(np.delete(output, [4, 7], axis=1), axis=0)\n assert_allclose(det, output, atol=1, rtol=7e-3, verbose=True)\n \n # Instantiate new tracker and ensure minimum number of hits works\n tracker = create_tracker(\n tracker_type='deepocsort',\n tracker_config=tracker_conf,\n reid_weights=WEIGHTS / 'mobilenetv2_x1_4_dukemtmcreid.pt',\n device='cpu',\n half=False,\n per_class=False\n )\n tracker.min_hits = 2\n output = tracker.update(np.empty((0, 6)), rgb)\n assert output.size == 0\n output = tracker.update(np.empty((0, 6)), rgb)\n assert output.size == 0\n output = tracker.update(det, rgb)\n assert output.size == 0\n> output = tracker.update(det, rgb)\n\ntests/test_python.py:86: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \nboxmot/trackers/deepocsort/deep_ocsort.py:434: in update\n matched, unmatched_dets, unmatched_trks = associate(\nboxmot/utils/association.py:146: in associate\n iou_matrix = run_asso_func(asso_func, detections, trackers, w, h)\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\nfunc = <function giou_batch at 0x7f20a69f0700>\nargs = (array([[1.44e+02, 2.12e+02, 5.78e+02, 4.80e+02, 8.20e-01],\n [4.25e+02, 2.81e+02, 5.76e+02, 4.72e+02, 5.60e-01]]), array([[144., 212., 578., 480., 0.],\n [425., 281., 576., 472., 0.]]), 640, 640)\n\n def run_asso_func(func, *args):\n \"\"\"\n Wrapper function that checks the inputs to the association 
functions\n and then call either one of the iou association functions or centroid.\n \n Parameters:\n func: The batch function to call (either *iou*_batch or centroid_batch).\n *args: Variable length argument list, containing either bounding boxes and optionally size parameters.\n \"\"\"\n if func not in [iou_batch, giou_batch, diou_batch, ciou_batch, centroid_batch]:\n raise ValueError(\"Invalid function specified. Must be either '(g,d,c, )iou_batch' or 'centroid_batch'.\")\n \n if func is (iou_batch or giou_batch or diou_batch or ciou_batch):\n if len(args) != 4 or not all(isinstance(arg, (list, np.ndarray)) for arg in args[0:2]):\n raise ValueError(\"Invalid arguments for iou_batch. Expected two bounding boxes.\")\n return func(*args)\n elif func is centroid_batch:\n if len(args) != 4 or not all(isinstance(arg, (list, np.ndarray)) for arg in args[:2]) or not all(isinstance(arg, (int)) for arg in args[2:]):\n raise ValueError(\"Invalid arguments for centroid_batch. Expected two bounding boxes and two size parameters.\")\n return func(*args)\n else:\n> raise ValueError(\"No such association method\")\nE ValueError: No such association method\n\nboxmot/utils/iou.py:212: ValueError\n----------------------------- Captured stderr call -----------------------------\nDownloading...\nFrom: https://drive.google.com/uc?id=12uD5FeVqLg9-AFDju2L7SQxjmPb4zpBN\nTo: /home/runner/work/yolo_tracking/yolo_tracking/examples/weights/mobilenetv2_x1_4_dukemtmcreid.pt\n\n 0%| | 0.00/22.4M [00:00<?, ?B/s]\n 2%|\u258f | 524k/22.4M [00:00<00:05, 3.80MB/s]\n 35%|\u2588\u2588\u2588\u258c | 7.86M/22.4M [00:00<00:00, 38.7MB/s]\n 75%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 16.8M/22.4M [00:00<00:00, 58.3MB/s]\n100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 22.4M/22.4M [00:00<00:00, 54.7MB/s]\n\u001b[32m2024-01-12 20:12:35.371\u001b[0m | \u001b[32m\u001b[1mSUCCESS \u001b[0m | \u001b[36mboxmot.appearance.reid_model_factory\u001b[0m:\u001b[36mload_pretrained_weights\u001b[0m:\u001b[36m207\u001b[0m - \u001b[32m\u001b[1mSuccessfully loaded pretrained weights from \"/home/runner/work/yolo_tracking/yolo_tracking/examples/weights/mobilenetv2_x1_4_dukemtmcreid.pt\"\u001b[0m\n\u001b[32m2024-01-12 20:12:35.534\u001b[0m | \u001b[32m\u001b[1mSUCCESS \u001b[0m | \u001b[36mboxmot.appearance.reid_model_factory\u001b[0m:\u001b[36mload_pretrained_weights\u001b[0m:\u001b[36m207\u001b[0m - \u001b[32m\u001b[1mSuccessfully loaded pretrained weights from \"/home/runner/work/yolo_tracking/yolo_tracking/examples/weights/mobilenetv2_x1_4_dukemtmcreid.pt\"\u001b[0m\n=============================== warnings summary ===============================\nboxmot/utils/checks.py:5\n /home/runner/work/yolo_tracking/yolo_tracking/boxmot/utils/checks.py:5: DeprecationWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html\n import pkg_resources\n\n../../../../../opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('google')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. 
See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\n../../../../../opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('sphinxcontrib')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\ntests/test_exports.py::test_export_onnx\n /home/runner/work/yolo_tracking/yolo_tracking/boxmot/appearance/backbones/osnet.py:389: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs!\n if return_featuremaps:\n\ntests/test_exports.py::test_export_openvino\n /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/defusedxml/__init__.py:30: DeprecationWarning: defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead.\n from . import cElementTree\n\n-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html\n\n---------- coverage: platform linux, python 3.10.13-final-0 ----------\nCoverage HTML written to dir htmlcov\n\n=========================== short test summary info ============================\nFAILED tests/test_python.py::test_deepocsort_output - ValueError: No such association method\n================== 1 failed, 13 passed, 8 warnings in 33.78s ===================\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/boxmot/utils/iou.py b/boxmot/utils/iou.py\nindex d8bfc7e..144f40f 100644\n--- a/boxmot/utils/iou.py\n+++ b/boxmot/utils/iou.py\n@@ -200,10 +200,10 @@ def run_asso_func(func, *args):\n if func not in [iou_batch, giou_batch, diou_batch, ciou_batch, centroid_batch]:\n raise ValueError(\"Invalid function specified. Must be either '(g,d,c, )iou_batch' or 'centroid_batch'.\")\n \n- if func is (iou_batch or giou_batch or diou_batch or ciou_batch):\n+ if func in (iou_batch, giou_batch, diou_batch, ciou_batch):\n if len(args) != 4 or not all(isinstance(arg, (list, np.ndarray)) for arg in args[0:2]):\n raise ValueError(\"Invalid arguments for iou_batch. Expected two bounding boxes.\")\n- return func(*args)\n+ return func(*args[0:2])\n elif func is centroid_batch:\n if len(args) != 4 or not all(isinstance(arg, (list, np.ndarray)) for arg in args[:2]) or not all(isinstance(arg, (int)) for arg in args[2:]):\n raise ValueError(\"Invalid arguments for centroid_batch. Expected two bounding boxes and two size parameters.\")\n", "difficulty": 2, "changed_files": ["boxmot/utils/iou.py"], "commit_link": "https://github.com/mikel-brostrom/yolo_tracking/tree/03669a5d72130c57575bedd657b82c601f08a982"}
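The root cause captured in this record is a classic Python pitfall: `func is (iou_batch or giou_batch or diou_batch or ciou_batch)` does not test membership. `or` returns its first truthy operand (here the function object `iou_batch`), so the condition collapses to `func is iou_batch` and every other association function falls through to the `else` branch, raising "No such association method". A minimal self-contained sketch of the pitfall and of the tuple-membership fix applied in the diff (`f` and `g` are stand-ins for the real batch functions):

```python
def f():  # stand-in for iou_batch
    pass

def g():  # stand-in for giou_batch
    pass

func = g

# Bug: `or` short-circuits to its first truthy operand (f),
# so the test is equivalent to `func is f` -- False for g.
print(func is (f or g))  # False

# Fix from the diff: explicit membership test over a tuple.
print(func in (f, g))    # True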
data/python/0b08b8e.json ADDED
The diff for this file is too large to render.
 
data/python/0b93d2d.json ADDED
The diff for this file is too large to render.
 
data/python/0f71221.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 65, "repo_owner": "scrapy", "repo_name": "scrapy", "head_branch": "component-getters", "workflow_name": "Checks", "workflow_filename": "checks.yml", "workflow_path": ".github/workflows/checks.yml", "contributor": "Gallaecio", "sha_fail": "0f71221cf9875ed8ef3400e1008408e79b6691e6", "sha_success": "c1ba9ccdf916b89d875628ba143dc5c9f6977430", "workflow": "name: Checks\non: [push, pull_request]\n\nconcurrency:\n group: ${{github.workflow}}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n checks:\n runs-on: ubuntu-latest\n strategy:\n fail-fast: false\n matrix:\n include:\n - python-version: \"3.12\"\n env:\n TOXENV: pylint\n - python-version: 3.8\n env:\n TOXENV: typing\n - python-version: \"3.11\" # Keep in sync with .readthedocs.yml\n env:\n TOXENV: docs\n - python-version: \"3.12\"\n env:\n TOXENV: twinecheck\n\n steps:\n - uses: actions/checkout@v4\n\n - name: Set up Python ${{ matrix.python-version }}\n uses: actions/setup-python@v4\n with:\n python-version: ${{ matrix.python-version }}\n\n - name: Run check\n env: ${{ matrix.env }}\n run: |\n pip install -U tox\n tox\n\n pre-commit:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - uses: pre-commit/[email protected]\n", "logs": [{"step_name": "checks (3.12, pylint)/4_Run check.txt", "log": "##[group]Run pip install -U tox\n\u001b[36;1mpip install -U tox\u001b[0m\n\u001b[36;1mtox\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.1/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib\n TOXENV: pylint\n##[endgroup]\nCollecting tox\n Downloading tox-4.11.4-py3-none-any.whl.metadata (5.0 kB)\nCollecting cachetools>=5.3.1 (from tox)\n Downloading cachetools-5.3.2-py3-none-any.whl.metadata (5.2 kB)\nCollecting chardet>=5.2 (from tox)\n Downloading chardet-5.2.0-py3-none-any.whl.metadata (3.4 kB)\nCollecting colorama>=0.4.6 (from tox)\n Downloading colorama-0.4.6-py2.py3-none-any.whl (25 kB)\nCollecting filelock>=3.12.3 (from tox)\n Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting packaging>=23.1 (from tox)\n Downloading packaging-23.2-py3-none-any.whl.metadata (3.2 kB)\nCollecting platformdirs>=3.10 (from tox)\n Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nCollecting pluggy>=1.3 (from tox)\n Downloading pluggy-1.3.0-py3-none-any.whl.metadata (4.3 kB)\nCollecting pyproject-api>=1.6.1 (from tox)\n Downloading pyproject_api-1.6.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting virtualenv>=20.24.3 (from tox)\n Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.24.3->tox)\n Downloading distlib-0.3.8-py2.py3-none-any.whl.metadata (5.1 kB)\nDownloading tox-4.11.4-py3-none-any.whl (153 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 154.0/154.0 kB 16.6 MB/s eta 0:00:00\nDownloading cachetools-5.3.2-py3-none-any.whl (9.3 kB)\nDownloading chardet-5.2.0-py3-none-any.whl (199 kB)\n 
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 199.4/199.4 kB 48.7 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading packaging-23.2-py3-none-any.whl (53 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 53.0/53.0 kB 16.1 MB/s eta 0:00:00\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nDownloading pluggy-1.3.0-py3-none-any.whl (18 kB)\nDownloading pyproject_api-1.6.1-py3-none-any.whl (12 kB)\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 104.2 MB/s eta 0:00:00\nDownloading distlib-0.3.8-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 81.3 MB/s eta 0:00:00\nInstalling collected packages: distlib, pluggy, platformdirs, packaging, filelock, colorama, chardet, cachetools, virtualenv, pyproject-api, tox\nSuccessfully installed cachetools-5.3.2 chardet-5.2.0 colorama-0.4.6 distlib-0.3.8 filelock-3.13.1 packaging-23.2 platformdirs-4.1.0 pluggy-1.3.0 pyproject-api-1.6.1 tox-4.11.4 virtualenv-20.25.0\n\n[notice] A new release of pip is available: 23.3.1 -> 23.3.2\n[notice] To update, run: pip install --upgrade pip\npylint: install_deps> python -I -m pip install -ctests/upper-constraints.txt boto3 google-cloud-storage 'markupsafe<2.1.0' 'mitmproxy<8,>=4.0.4; python_version < \"3.9\" and implementation_name != \"pypy\"' Pillow pylint==3.0.1 robotexclusionrulesparser 'Twisted[http2]' -r tests/requirements.txt\n.pkg: install_requires> python -I -m pip install 'setuptools>=40.8.0' wheel\n.pkg: _optional_hooks> python /opt/hostedtoolcache/Python/3.12.1/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_sdist> python /opt/hostedtoolcache/Python/3.12.1/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_wheel> python /opt/hostedtoolcache/Python/3.12.1/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: install_requires_for_build_wheel> python -I -m pip install wheel\n.pkg: freeze> python -m pip freeze --all\n.pkg: pip==23.3.1,setuptools==69.0.2,wheel==0.42.0\n.pkg: prepare_metadata_for_build_wheel> python /opt/hostedtoolcache/Python/3.12.1/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: build_sdist> python /opt/hostedtoolcache/Python/3.12.1/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\npylint: install_package_deps> python -I -m pip install -ctests/upper-constraints.txt 'PyDispatcher>=2.0.5; platform_python_implementation == \"CPython\"' 'PyPyDispatcher>=2.1.0; 
platform_python_implementation == \"PyPy\"' 'Twisted>=18.9.0' 'cryptography>=36.0.0' 'cssselect>=0.9.1' 'itemadapter>=0.1.0' 'itemloaders>=1.0.1' 'lxml>=4.4.1' packaging 'parsel>=1.5.0' 'protego>=0.1.15' 'pyOpenSSL>=21.0.0' 'queuelib>=1.4.2' 'service-identity>=18.1.0' setuptools tldextract 'w3lib>=1.17.0' 'zope.interface>=5.1.0'\npylint: install_package> python -I -m pip install -ctests/upper-constraints.txt --force-reinstall --no-deps /home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz\npylint: freeze> python -m pip freeze --all\npylint: astroid==3.0.2,asttokens==2.4.1,attrs==23.1.0,Automat==22.10.0,blessed==1.20.0,boto3==1.34.3,botocore==1.34.3,bpython==0.24,Brotli==1.1.0,cachetools==5.3.2,certifi==2023.11.17,cffi==1.16.0,charset-normalizer==3.3.2,constantly==23.10.4,coverage==7.3.3,cryptography==41.0.7,cssselect==1.2.0,curtsies==0.4.2,cwcwidth==0.1.9,decorator==5.1.1,dill==0.3.7,execnet==2.0.2,executing==2.0.1,filelock==3.13.1,google-api-core==2.15.0,google-auth==2.25.2,google-cloud-core==2.4.1,google-cloud-storage==2.14.0,google-crc32c==1.5.0,google-resumable-media==2.7.0,googleapis-common-protos==1.62.0,greenlet==3.0.2,h2==4.1.0,hpack==4.0.0,hyperframe==6.0.1,hyperlink==21.0.0,idna==3.6,incremental==22.10.0,iniconfig==2.0.0,ipython==8.18.1,isort==5.13.2,itemadapter==0.8.0,itemloaders==1.1.0,jedi==0.19.1,jmespath==1.0.1,lxml==4.9.3,MarkupSafe==2.0.1,matplotlib-inline==0.1.6,mccabe==0.7.0,packaging==23.2,parsel==1.8.1,parso==0.8.3,pexpect==4.9.0,Pillow==10.1.0,pip==23.3.2,platformdirs==4.1.0,pluggy==1.3.0,priority==1.3.0,prompt-toolkit==3.0.43,Protego==0.3.0,protobuf==4.25.1,ptyprocess==0.7.0,pure-eval==0.2.2,pyasn1==0.5.1,pyasn1-modules==0.3.0,pycparser==2.21,PyDispatcher==2.0.7,pyftpdlib==1.5.9,Pygments==2.17.2,pylint==3.0.1,pyOpenSSL==23.3.0,pytest==7.4.3,pytest-cov==4.0.0,pytest-xdist==3.5.0,python-dateutil==2.8.2,pyxdg==0.28,queuelib==1.6.2,requests==2.31.0,requests-file==1.5.1,robotexclusionrulesparser==1.7.1,rsa==4.9,s3transfer==0.9.0,Scrapy @ file:///home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz#sha256=6f43aa5d37ec10736bd947dbba89df54ebc97175083ab360a721b3360f0d895b,service-identity==23.1.0,setuptools==69.0.2,six==1.16.0,stack-data==0.6.3,sybil==6.0.2,testfixtures==7.2.2,tldextract==5.1.1,tomlkit==0.12.3,traitlets==5.14.0,Twisted==23.10.0,typing_extensions==4.9.0,urllib3==2.0.7,uvloop==0.19.0,w3lib==2.1.2,wcwidth==0.2.12,zope.interface==6.1,zstandard==0.22.0\npylint: commands[0]> pylint conftest.py docs extras scrapy setup.py tests\n<unknown>:230: SyntaxWarning: invalid escape sequence '\\d'\n************* Module scrapy.crawler\nscrapy/crawler.py:182:4: W0211: Static method with 'cls' as first argument (bad-staticmethod-argument)\nscrapy/crawler.py:184:15: C0123: Use isinstance() rather than type() for a typecheck. 
(unidiomatic-typecheck)\n\n------------------------------------\nYour code has been rated at 10.00/10\n\npylint: exit 20 (56.60 seconds) /home/runner/work/scrapy/scrapy> pylint conftest.py docs extras scrapy setup.py tests pid=2086\n.pkg: _exit> python /opt/hostedtoolcache/Python/3.12.1/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n pylint: FAIL code 20 (100.72=setup[44.12]+cmd[56.60] seconds)\n evaluation failed :( (100.86 seconds)\n##[error]Process completed with exit code 20.\n"}], "diff": "diff --git a/scrapy/crawler.py b/scrapy/crawler.py\nindex 76f4048e4..333b39f33 100644\n--- a/scrapy/crawler.py\n+++ b/scrapy/crawler.py\n@@ -179,9 +179,12 @@ class Crawler:\n yield maybeDeferred(self.engine.stop)\n \n @staticmethod\n- def _get_component(cls, components):\n+ def _get_component(component_class, components):\n for component in components:\n- if type(component) is cls:\n+ if (\n+ type(component) # pylint: disable=unidiomatic-typecheck\n+ is component_class\n+ ):\n return component\n return None\n \n", "difficulty": 0, "changed_files": ["scrapy/crawler.py"], "commit_link": "https://github.com/scrapy/scrapy/tree/0f71221cf9875ed8ef3400e1008408e79b6691e6"}
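Note on the datapoint above: pylint's W0211 (bad-staticmethod-argument) fires because the static method's first parameter is named cls, a spelling pylint reserves for classmethods, and C0123 (unidiomatic-typecheck) flags the exact-type comparison. The diff renames the parameter to component_class, which clears W0211 with no behavior change, and keeps the type(...) is ... check behind a pylint disable because isinstance() would also match subclasses. A minimal standalone sketch of the same pattern, using illustrative names (Base, Derived, get_exact) that are not from the Scrapy codebase:

    class Base:
        pass

    class Derived(Base):
        pass

    def get_exact(component_class, components):
        # Deliberate exact-type lookup: an instance of Derived must NOT
        # satisfy a request for Base, so isinstance() would be wrong here.
        for component in components:
            if type(component) is component_class:  # pylint: disable=unidiomatic-typecheck
                return component
        return None

    found = get_exact(Base, [Derived(), Base()])
    assert type(found) is Base  # the Derived() instance was skipped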
data/python/102f918.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 25, "repo_owner": "mindsdb", "repo_name": "mindsdb", "head_branch": "staging", "workflow_name": "MindsDB Code Checks", "workflow_filename": "code_checks.yml", "workflow_path": ".github/workflows/code_checks.yml", "contributor": "mindsdb", "sha_fail": "102f918deb2532bb7b825f00258f2c1414cf94da", "sha_success": "1cf75d7a1071fba82d4190725e984e8d55db48fb", "workflow": "name: MindsDB Code Checks\n\non:\n pull_request:\n branches: [stable, staging]\n \njobs:\n check_requirements:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/[email protected]\n - name: Set up Python 3.8\n uses: actions/[email protected]\n with:\n python-version: 3.9\n - name: Check main requirements\n shell: bash\n run: |\n pip install -r requirements/requirements-dev.txt\n\n python tests/scripts/check_requirements.py\n\n - name: Check requirements files are installable\n shell: bash\n run: |\n pip install --dry-run --ignore-installed . # Install only the default handlers. We can expand this to all handlers later with: .[all_handlers_extras]\n", "logs": [{"step_name": "check_requirements/4_Check main requirements.txt", "log": "##[group]Run pip install -r requirements/requirements-dev.txt\n\u001b[36;1mpip install -r requirements/requirements-dev.txt\u001b[0m\n\u001b[36;1m\u001b[0m\n\u001b[36;1mpython tests/scripts/check_requirements.py\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\nCollecting black==23.3.0 (from -r requirements/requirements-dev.txt (line 1))\n Downloading black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.7 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1.7/1.7 MB 18.4 MB/s eta 0:00:00\nCollecting flake8==5.0.4 (from -r requirements/requirements-dev.txt (line 2))\n Downloading flake8-5.0.4-py2.py3-none-any.whl (61 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 61.9/61.9 kB 19.0 MB/s eta 0:00:00\nCollecting isort==5.10.1 (from -r requirements/requirements-dev.txt (line 3))\n Downloading isort-5.10.1-py3-none-any.whl (103 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 103.4/103.4 kB 26.5 MB/s eta 0:00:00\nCollecting pre-commit>=2.16.0 (from -r requirements/requirements-dev.txt (line 4))\n Downloading pre_commit-3.6.0-py2.py3-none-any.whl.metadata (1.3 kB)\nCollecting watchfiles==0.19.0 (from -r requirements/requirements-dev.txt (line 5))\n Downloading watchfiles-0.19.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n 
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1.3/1.3 MB 31.1 MB/s eta 0:00:00\nRequirement already satisfied: setuptools in /opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages (from -r requirements/requirements-dev.txt (line 6)) (58.1.0)\nCollecting wheel (from -r requirements/requirements-dev.txt (line 7))\n Downloading wheel-0.42.0-py3-none-any.whl.metadata (2.2 kB)\nCollecting deptry==0.12.0 (from -r requirements/requirements-dev.txt (line 8))\n Downloading deptry-0.12.0-py3-none-any.whl.metadata (4.6 kB)\nCollecting twine (from -r requirements/requirements-dev.txt (line 9))\n Downloading twine-4.0.2-py3-none-any.whl (36 kB)\nCollecting click>=8.0.0 (from black==23.3.0->-r requirements/requirements-dev.txt (line 1))\n Downloading click-8.1.7-py3-none-any.whl.metadata (3.0 kB)\nCollecting mypy-extensions>=0.4.3 (from black==23.3.0->-r requirements/requirements-dev.txt (line 1))\n Downloading mypy_extensions-1.0.0-py3-none-any.whl (4.7 kB)\nCollecting packaging>=22.0 (from black==23.3.0->-r requirements/requirements-dev.txt (line 1))\n Downloading packaging-23.2-py3-none-any.whl.metadata (3.2 kB)\nCollecting pathspec>=0.9.0 (from black==23.3.0->-r requirements/requirements-dev.txt (line 1))\n Downloading pathspec-0.12.1-py3-none-any.whl.metadata (21 kB)\nCollecting platformdirs>=2 (from black==23.3.0->-r requirements/requirements-dev.txt (line 1))\n Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nCollecting tomli>=1.1.0 (from black==23.3.0->-r requirements/requirements-dev.txt (line 1))\n Downloading tomli-2.0.1-py3-none-any.whl (12 kB)\nCollecting typing-extensions>=3.10.0.0 (from black==23.3.0->-r requirements/requirements-dev.txt (line 1))\n Downloading typing_extensions-4.9.0-py3-none-any.whl.metadata (3.0 kB)\nCollecting mccabe<0.8.0,>=0.7.0 (from flake8==5.0.4->-r requirements/requirements-dev.txt (line 2))\n Downloading mccabe-0.7.0-py2.py3-none-any.whl (7.3 kB)\nCollecting pycodestyle<2.10.0,>=2.9.0 (from flake8==5.0.4->-r requirements/requirements-dev.txt (line 2))\n Downloading pycodestyle-2.9.1-py2.py3-none-any.whl (41 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 41.5/41.5 kB 11.9 MB/s eta 0:00:00\nCollecting pyflakes<2.6.0,>=2.5.0 (from flake8==5.0.4->-r requirements/requirements-dev.txt (line 2))\n Downloading pyflakes-2.5.0-py2.py3-none-any.whl (66 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 66.1/66.1 kB 20.4 MB/s eta 0:00:00\nCollecting anyio>=3.0.0 (from watchfiles==0.19.0->-r requirements/requirements-dev.txt (line 5))\n Downloading anyio-4.2.0-py3-none-any.whl.metadata (4.6 kB)\nCollecting chardet>=4.0.0 (from deptry==0.12.0->-r requirements/requirements-dev.txt (line 8))\n Downloading chardet-5.2.0-py3-none-any.whl.metadata (3.4 kB)\nCollecting cfgv>=2.0.0 (from pre-commit>=2.16.0->-r requirements/requirements-dev.txt (line 4))\n Downloading cfgv-3.4.0-py2.py3-none-any.whl.metadata (8.5 kB)\nCollecting identify>=1.0.0 (from pre-commit>=2.16.0->-r 
requirements/requirements-dev.txt (line 4))\n Downloading identify-2.5.33-py2.py3-none-any.whl.metadata (4.4 kB)\nCollecting nodeenv>=0.11.1 (from pre-commit>=2.16.0->-r requirements/requirements-dev.txt (line 4))\n Downloading nodeenv-1.8.0-py2.py3-none-any.whl.metadata (21 kB)\nCollecting pyyaml>=5.1 (from pre-commit>=2.16.0->-r requirements/requirements-dev.txt (line 4))\n Downloading PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.1 kB)\nCollecting virtualenv>=20.10.0 (from pre-commit>=2.16.0->-r requirements/requirements-dev.txt (line 4))\n Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nCollecting pkginfo>=1.8.1 (from twine->-r requirements/requirements-dev.txt (line 9))\n Downloading pkginfo-1.9.6-py3-none-any.whl (30 kB)\nCollecting readme-renderer>=35.0 (from twine->-r requirements/requirements-dev.txt (line 9))\n Downloading readme_renderer-42.0-py3-none-any.whl.metadata (2.8 kB)\nCollecting requests>=2.20 (from twine->-r requirements/requirements-dev.txt (line 9))\n Downloading requests-2.31.0-py3-none-any.whl.metadata (4.6 kB)\nCollecting requests-toolbelt!=0.9.0,>=0.8.0 (from twine->-r requirements/requirements-dev.txt (line 9))\n Downloading requests_toolbelt-1.0.0-py2.py3-none-any.whl (54 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 54.5/54.5 kB 17.0 MB/s eta 0:00:00\nCollecting urllib3>=1.26.0 (from twine->-r requirements/requirements-dev.txt (line 9))\n Downloading urllib3-2.1.0-py3-none-any.whl.metadata (6.4 kB)\nCollecting importlib-metadata>=3.6 (from twine->-r requirements/requirements-dev.txt (line 9))\n Downloading importlib_metadata-7.0.1-py3-none-any.whl.metadata (4.9 kB)\nCollecting keyring>=15.1 (from twine->-r requirements/requirements-dev.txt (line 9))\n Downloading keyring-24.3.0-py3-none-any.whl.metadata (20 kB)\nCollecting rfc3986>=1.4.0 (from twine->-r requirements/requirements-dev.txt (line 9))\n Downloading rfc3986-2.0.0-py2.py3-none-any.whl (31 kB)\nCollecting rich>=12.0.0 (from twine->-r requirements/requirements-dev.txt (line 9))\n Downloading rich-13.7.0-py3-none-any.whl.metadata (18 kB)\nCollecting idna>=2.8 (from anyio>=3.0.0->watchfiles==0.19.0->-r requirements/requirements-dev.txt (line 5))\n Downloading idna-3.6-py3-none-any.whl.metadata (9.9 kB)\nCollecting sniffio>=1.1 (from anyio>=3.0.0->watchfiles==0.19.0->-r requirements/requirements-dev.txt (line 5))\n Downloading sniffio-1.3.0-py3-none-any.whl (10 kB)\nCollecting exceptiongroup>=1.0.2 (from anyio>=3.0.0->watchfiles==0.19.0->-r requirements/requirements-dev.txt (line 5))\n Downloading exceptiongroup-1.2.0-py3-none-any.whl.metadata (6.6 kB)\nCollecting zipp>=0.5 (from importlib-metadata>=3.6->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading zipp-3.17.0-py3-none-any.whl.metadata (3.7 kB)\nCollecting jaraco.classes (from keyring>=15.1->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading jaraco.classes-3.3.0-py3-none-any.whl.metadata (2.9 kB)\nCollecting SecretStorage>=3.2 (from keyring>=15.1->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading SecretStorage-3.3.3-py3-none-any.whl (15 kB)\nCollecting jeepney>=0.4.2 (from keyring>=15.1->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading jeepney-0.8.0-py3-none-any.whl (48 kB)\n 
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 48.4/48.4 kB 17.1 MB/s eta 0:00:00\nCollecting nh3>=0.2.14 (from readme-renderer>=35.0->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading nh3-0.2.15-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (1.7 kB)\nCollecting docutils>=0.13.1 (from readme-renderer>=35.0->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading docutils-0.20.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting Pygments>=2.5.1 (from readme-renderer>=35.0->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading pygments-2.17.2-py3-none-any.whl.metadata (2.6 kB)\nCollecting charset-normalizer<4,>=2 (from requests>=2.20->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (33 kB)\nCollecting certifi>=2017.4.17 (from requests>=2.20->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading certifi-2023.11.17-py3-none-any.whl.metadata (2.2 kB)\nCollecting markdown-it-py>=2.2.0 (from rich>=12.0.0->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading markdown_it_py-3.0.0-py3-none-any.whl.metadata (6.9 kB)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.10.0->pre-commit>=2.16.0->-r requirements/requirements-dev.txt (line 4))\n Downloading distlib-0.3.8-py2.py3-none-any.whl.metadata (5.1 kB)\nCollecting filelock<4,>=3.12.2 (from virtualenv>=20.10.0->pre-commit>=2.16.0->-r requirements/requirements-dev.txt (line 4))\n Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting mdurl~=0.1 (from markdown-it-py>=2.2.0->rich>=12.0.0->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading mdurl-0.1.2-py3-none-any.whl (10.0 kB)\nCollecting cryptography>=2.0 (from SecretStorage>=3.2->keyring>=15.1->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl.metadata (5.2 kB)\nCollecting more-itertools (from jaraco.classes->keyring>=15.1->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading more_itertools-10.1.0-py3-none-any.whl.metadata (33 kB)\nCollecting cffi>=1.12 (from cryptography>=2.0->SecretStorage>=3.2->keyring>=15.1->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (1.5 kB)\nCollecting pycparser (from cffi>=1.12->cryptography>=2.0->SecretStorage>=3.2->keyring>=15.1->twine->-r requirements/requirements-dev.txt (line 9))\n Downloading pycparser-2.21-py2.py3-none-any.whl (118 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 118.7/118.7 kB 35.3 MB/s eta 0:00:00\nDownloading deptry-0.12.0-py3-none-any.whl (41 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 41.7/41.7 kB 13.1 MB/s eta 0:00:00\nDownloading pre_commit-3.6.0-py2.py3-none-any.whl (204 kB)\n 
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 204.0/204.0 kB 42.9 MB/s eta 0:00:00\nDownloading wheel-0.42.0-py3-none-any.whl (65 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 65.4/65.4 kB 22.9 MB/s eta 0:00:00\nDownloading anyio-4.2.0-py3-none-any.whl (85 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 85.5/85.5 kB 29.1 MB/s eta 0:00:00\nDownloading cfgv-3.4.0-py2.py3-none-any.whl (7.2 kB)\nDownloading chardet-5.2.0-py3-none-any.whl (199 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 199.4/199.4 kB 43.8 MB/s eta 0:00:00\nDownloading click-8.1.7-py3-none-any.whl (97 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 97.9/97.9 kB 33.7 MB/s eta 0:00:00\nDownloading identify-2.5.33-py2.py3-none-any.whl (98 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 98.9/98.9 kB 31.2 MB/s eta 0:00:00\nDownloading importlib_metadata-7.0.1-py3-none-any.whl (23 kB)\nDownloading keyring-24.3.0-py3-none-any.whl (38 kB)\nDownloading nodeenv-1.8.0-py2.py3-none-any.whl (22 kB)\nDownloading packaging-23.2-py3-none-any.whl (53 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 53.0/53.0 kB 18.2 MB/s eta 0:00:00\nDownloading pathspec-0.12.1-py3-none-any.whl (31 kB)\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nDownloading PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (738 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 738.9/738.9 kB 37.6 MB/s eta 0:00:00\nDownloading readme_renderer-42.0-py3-none-any.whl (13 kB)\nDownloading requests-2.31.0-py3-none-any.whl (62 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 62.6/62.6 kB 21.9 MB/s eta 0:00:00\nDownloading rich-13.7.0-py3-none-any.whl (240 kB)\n 
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 240.6/240.6 kB 51.0 MB/s eta 0:00:00\nDownloading typing_extensions-4.9.0-py3-none-any.whl (32 kB)\nDownloading urllib3-2.1.0-py3-none-any.whl (104 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 104.6/104.6 kB 33.0 MB/s eta 0:00:00\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 53.1 MB/s eta 0:00:00\nDownloading certifi-2023.11.17-py3-none-any.whl (162 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 162.5/162.5 kB 43.3 MB/s eta 0:00:00\nDownloading charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (142 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 142.3/142.3 kB 40.8 MB/s eta 0:00:00\nDownloading distlib-0.3.8-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 68.6 MB/s eta 0:00:00\nDownloading docutils-0.20.1-py3-none-any.whl (572 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 572.7/572.7 kB 67.0 MB/s eta 0:00:00\nDownloading exceptiongroup-1.2.0-py3-none-any.whl (16 kB)\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading idna-3.6-py3-none-any.whl (61 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 61.6/61.6 kB 20.6 MB/s eta 0:00:00\nDownloading markdown_it_py-3.0.0-py3-none-any.whl (87 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 87.5/87.5 kB 28.4 MB/s eta 0:00:00\nDownloading nh3-0.2.15-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.7 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1.7/1.7 MB 72.8 MB/s eta 0:00:00\nDownloading 
pygments-2.17.2-py3-none-any.whl (1.2 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1.2/1.2 MB 81.6 MB/s eta 0:00:00\nDownloading zipp-3.17.0-py3-none-any.whl (7.4 kB)\nDownloading jaraco.classes-3.3.0-py3-none-any.whl (5.9 kB)\nDownloading cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl (4.4 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 4.4/4.4 MB 94.7 MB/s eta 0:00:00\nDownloading more_itertools-10.1.0-py3-none-any.whl (55 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 55.8/55.8 kB 17.6 MB/s eta 0:00:00\nDownloading cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (443 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 443.4/443.4 kB 72.9 MB/s eta 0:00:00\nInstalling collected packages: nh3, distlib, zipp, wheel, urllib3, typing-extensions, tomli, sniffio, rfc3986, pyyaml, Pygments, pyflakes, pycparser, pycodestyle, platformdirs, pkginfo, pathspec, packaging, nodeenv, mypy-extensions, more-itertools, mdurl, mccabe, jeepney, isort, idna, identify, filelock, exceptiongroup, docutils, click, charset-normalizer, chardet, cfgv, certifi, virtualenv, requests, readme-renderer, markdown-it-py, jaraco.classes, importlib-metadata, flake8, deptry, cffi, black, anyio, watchfiles, rich, requests-toolbelt, pre-commit, cryptography, SecretStorage, keyring, twine\nSuccessfully installed Pygments-2.17.2 SecretStorage-3.3.3 anyio-4.2.0 black-23.3.0 certifi-2023.11.17 cffi-1.16.0 cfgv-3.4.0 chardet-5.2.0 charset-normalizer-3.3.2 click-8.1.7 cryptography-41.0.7 deptry-0.12.0 distlib-0.3.8 docutils-0.20.1 exceptiongroup-1.2.0 filelock-3.13.1 flake8-5.0.4 identify-2.5.33 idna-3.6 importlib-metadata-7.0.1 isort-5.10.1 jaraco.classes-3.3.0 jeepney-0.8.0 keyring-24.3.0 markdown-it-py-3.0.0 mccabe-0.7.0 mdurl-0.1.2 more-itertools-10.1.0 mypy-extensions-1.0.0 nh3-0.2.15 nodeenv-1.8.0 packaging-23.2 pathspec-0.12.1 pkginfo-1.9.6 platformdirs-4.1.0 pre-commit-3.6.0 pycodestyle-2.9.1 pycparser-2.21 pyflakes-2.5.0 pyyaml-6.0.1 readme-renderer-42.0 requests-2.31.0 requests-toolbelt-1.0.0 rfc3986-2.0.0 rich-13.7.0 sniffio-1.3.0 tomli-2.0.1 twine-4.0.2 typing-extensions-4.9.0 urllib3-2.1.0 virtualenv-20.25.0 watchfiles-0.19.0 wheel-0.42.0 zipp-3.17.0\n\n[notice] A new release of pip is available: 23.0.1 -> 23.3.2\n[notice] To update, run: pip install --upgrade pip\n--- Checking requirements files for duplicates ---\n\n--- Checking that requirements match imports ---\n- requirements/requirements.txt\n None:None: DEP002 'type_infer' defined as a dependency but not used in the codebase\n\n\n--- Checking handlers that require other handlers ---\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/mindsdb/__main__.py b/mindsdb/__main__.py\nindex cac3482883..6cc92a7899 100644\n--- 
a/mindsdb/__main__.py\n+++ b/mindsdb/__main__.py\n@@ -35,6 +35,7 @@ from mindsdb.utilities.fs import create_dirs_recursive, clean_process_marks, cle\n from mindsdb.utilities.telemetry import telemetry_file_exists, disable_telemetry\n from mindsdb.utilities.context import context as ctx\n from mindsdb.utilities.auth import register_oauth_client, get_aws_meta_data\n+import type_infer # noqa\n \n try:\n import torch.multiprocessing as mp\n", "difficulty": 1, "changed_files": ["mindsdb/__main__.py"], "commit_link": "https://github.com/mindsdb/mindsdb/tree/102f918deb2532bb7b825f00258f2c1414cf94da"}
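Note on the datapoint above: deptry's DEP002 reports packages that are declared in a requirements file but never imported anywhere in the codebase, and the fix satisfies it by importing type_infer once in mindsdb/__main__.py with a noqa marker so flake8 does not then complain about an unused import. The same workaround in isolation; some_runtime_dep is a hypothetical package name, and where the diff uses a bare "# noqa" the sketch names F401, the specific flake8 rule being suppressed:

    # some_runtime_dep is needed at runtime (for example, it is loaded
    # dynamically) but is never imported on any static code path. Importing
    # it once here makes the declared dependency visible to deptry.
    import some_runtime_dep  # noqa: F401  (intentional unused import)

Without such an import, running "deptry ." against the tree reports the same shape of error as the log above: DEP002 'some_runtime_dep' defined as a dependency but not used in the codebase.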
data/python/1168b92.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 56, "repo_owner": "scrapy", "repo_name": "scrapy", "head_branch": "simplify-attempt2", "workflow_name": "Checks", "workflow_filename": "checks.yml", "workflow_path": ".github/workflows/checks.yml", "contributor": "monicaq21", "sha_fail": "1168b9244d680437e2a2683294a55c0b52c118f6", "sha_success": "b5b7c99e24c722fa09c426ed0245df1ae792d486", "workflow": "name: Checks\non: [push, pull_request]\n\nconcurrency:\n group: ${{github.workflow}}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n checks:\n runs-on: ubuntu-latest\n strategy:\n fail-fast: false\n matrix:\n include:\n - python-version: \"3.12\"\n env:\n TOXENV: pylint\n - python-version: 3.8\n env:\n TOXENV: typing\n - python-version: \"3.11\" # Keep in sync with .readthedocs.yml\n env:\n TOXENV: docs\n - python-version: \"3.12\"\n env:\n TOXENV: twinecheck\n\n steps:\n - uses: actions/checkout@v4\n\n - name: Set up Python ${{ matrix.python-version }}\n uses: actions/setup-python@v4\n with:\n python-version: ${{ matrix.python-version }}\n\n - name: Run check\n env: ${{ matrix.env }}\n run: |\n pip install -U tox\n tox\n\n pre-commit:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - uses: pre-commit/[email protected]\n", "logs": [{"step_name": "checks (3.12, pylint)/4_Run check.txt", "log": "##[group]Run pip install -U tox\n\u001b[36;1mpip install -U tox\u001b[0m\n\u001b[36;1mtox\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.0/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib\n TOXENV: pylint\n##[endgroup]\nCollecting tox\n Downloading tox-4.11.4-py3-none-any.whl.metadata (5.0 kB)\nCollecting cachetools>=5.3.1 (from tox)\n Downloading cachetools-5.3.2-py3-none-any.whl.metadata (5.2 kB)\nCollecting chardet>=5.2 (from tox)\n Downloading chardet-5.2.0-py3-none-any.whl.metadata (3.4 kB)\nCollecting colorama>=0.4.6 (from tox)\n Downloading colorama-0.4.6-py2.py3-none-any.whl (25 kB)\nCollecting filelock>=3.12.3 (from tox)\n Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting packaging>=23.1 (from tox)\n Downloading packaging-23.2-py3-none-any.whl.metadata (3.2 kB)\nCollecting platformdirs>=3.10 (from tox)\n Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nCollecting pluggy>=1.3 (from tox)\n Downloading pluggy-1.3.0-py3-none-any.whl.metadata (4.3 kB)\nCollecting pyproject-api>=1.6.1 (from tox)\n Downloading pyproject_api-1.6.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting virtualenv>=20.24.3 (from tox)\n Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.24.3->tox)\n Downloading distlib-0.3.7-py2.py3-none-any.whl.metadata (5.1 kB)\nDownloading tox-4.11.4-py3-none-any.whl (153 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 154.0/154.0 kB 19.9 MB/s eta 0:00:00\nDownloading cachetools-5.3.2-py3-none-any.whl (9.3 kB)\nDownloading chardet-5.2.0-py3-none-any.whl (199 kB)\n 
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 199.4/199.4 kB 51.7 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading packaging-23.2-py3-none-any.whl (53 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 53.0/53.0 kB 16.5 MB/s eta 0:00:00\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nDownloading pluggy-1.3.0-py3-none-any.whl (18 kB)\nDownloading pyproject_api-1.6.1-py3-none-any.whl (12 kB)\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 117.4 MB/s eta 0:00:00\nDownloading distlib-0.3.7-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 82.7 MB/s eta 0:00:00\nInstalling collected packages: distlib, pluggy, platformdirs, packaging, filelock, colorama, chardet, cachetools, virtualenv, pyproject-api, tox\nSuccessfully installed cachetools-5.3.2 chardet-5.2.0 colorama-0.4.6 distlib-0.3.7 filelock-3.13.1 packaging-23.2 platformdirs-4.1.0 pluggy-1.3.0 pyproject-api-1.6.1 tox-4.11.4 virtualenv-20.25.0\npylint: install_deps> python -I -m pip install -ctests/upper-constraints.txt boto3 google-cloud-storage 'markupsafe<2.1.0' 'mitmproxy<8,>=4.0.4; python_version < \"3.9\" and implementation_name != \"pypy\"' Pillow pylint==3.0.1 robotexclusionrulesparser 'Twisted[http2]' -r tests/requirements.txt\n.pkg: install_requires> python -I -m pip install 'setuptools>=40.8.0' wheel\n.pkg: _optional_hooks> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_sdist> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_wheel> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: install_requires_for_build_wheel> python -I -m pip install wheel\n.pkg: freeze> python -m pip freeze --all\n.pkg: pip==23.3.1,setuptools==69.0.2,wheel==0.42.0\n.pkg: prepare_metadata_for_build_wheel> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: build_sdist> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\npylint: install_package_deps> python -I -m pip install -ctests/upper-constraints.txt 'PyDispatcher>=2.0.5; platform_python_implementation == \"CPython\"' 'PyPyDispatcher>=2.1.0; platform_python_implementation == \"PyPy\"' 'Twisted>=18.9.0' 'cryptography>=36.0.0' 'cssselect>=0.9.1' 'itemadapter>=0.1.0' 
'itemloaders>=1.0.1' 'lxml>=4.4.1' packaging 'parsel>=1.5.0' 'protego>=0.1.15' 'pyOpenSSL>=21.0.0' 'queuelib>=1.4.2' 'service-identity>=18.1.0' setuptools tldextract 'w3lib>=1.17.0' 'zope.interface>=5.1.0'\npylint: install_package> python -I -m pip install -ctests/upper-constraints.txt --force-reinstall --no-deps /home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz\npylint: freeze> python -m pip freeze --all\npylint: annotated-types==0.6.0,astroid==3.0.1,asttokens==2.4.1,attrs==23.1.0,Automat==22.10.0,blessed==1.20.0,boto3==1.33.11,botocore==1.33.11,bpython==0.24,Brotli==1.1.0,cachetools==5.3.2,Cerberus==1.3.5,certifi==2023.11.17,cffi==1.16.0,charset-normalizer==3.3.2,constantly==23.10.4,coverage==7.3.2,cryptography==41.0.7,cssselect==1.2.0,curtsies==0.4.2,cwcwidth==0.1.9,decorator==5.1.1,dill==0.3.7,distlib==0.3.7,docopt==0.6.2,execnet==2.0.2,executing==2.0.1,filelock==3.13.1,google-api-core==2.15.0,google-auth==2.25.2,google-cloud-core==2.4.1,google-cloud-storage==2.13.0,google-crc32c==1.5.0,google-resumable-media==2.6.0,googleapis-common-protos==1.62.0,greenlet==3.0.2,h2==4.1.0,hpack==4.0.0,hyperframe==6.0.1,hyperlink==21.0.0,idna==3.6,incremental==22.10.0,iniconfig==2.0.0,ipython==8.18.1,isort==5.13.0,itemadapter==0.8.0,itemloaders==1.1.0,jedi==0.19.1,jmespath==1.0.1,lxml==4.9.3,MarkupSafe==2.0.1,matplotlib-inline==0.1.6,mccabe==0.7.0,packaging==23.2,parsel==1.8.1,parso==0.8.3,pep517==0.13.1,pexpect==4.9.0,Pillow==10.1.0,pip==23.3.1,pip-api==0.0.30,pipreqs==0.4.13,platformdirs==4.1.0,plette==0.4.4,pluggy==1.3.0,priority==1.3.0,prompt-toolkit==3.0.41,Protego==0.3.0,protobuf==4.25.1,ptyprocess==0.7.0,pure-eval==0.2.2,pyasn1==0.5.1,pyasn1-modules==0.3.0,pycparser==2.21,pydantic==2.5.2,pydantic_core==2.14.5,PyDispatcher==2.0.7,pyftpdlib==1.5.9,Pygments==2.17.2,pylint==3.0.1,pyOpenSSL==23.3.0,pytest==7.4.3,pytest-cov==4.0.0,pytest-xdist==3.5.0,python-dateutil==2.8.2,pyxdg==0.28,queuelib==1.6.2,requests==2.31.0,requests-file==1.5.1,requirementslib==3.0.0,robotexclusionrulesparser==1.7.1,rsa==4.9,s3transfer==0.8.2,Scrapy @ file:///home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz#sha256=badae26f961983adc76ef32b8b07c6ba5b979a31f891e5f2e57590381cc5b815,service-identity==23.1.0,setuptools==69.0.2,six==1.16.0,stack-data==0.6.3,sybil==6.0.2,testfixtures==7.2.2,tldextract==5.1.1,tomlkit==0.12.3,traitlets==5.14.0,Twisted==23.10.0,typing_extensions==4.9.0,urllib3==2.0.7,uvloop==0.19.0,w3lib==2.1.2,wcwidth==0.2.12,yarg==0.1.9,zope.interface==6.1,zstandard==0.22.0\npylint: commands[0]> pylint conftest.py docs extras scrapy setup.py tests\n<unknown>:230: SyntaxWarning: invalid escape sequence '\\d'\n************* Module scrapy.crawler\nscrapy/crawler.py:112:37: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.engine\nscrapy/core/engine.py:361:20: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.scheduler\nscrapy/core/scheduler.py:325:15: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/scheduler.py:336:12: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module 
scrapy.core.downloader.contextfactory\nscrapy/core/downloader/contextfactory.py:168:26: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/contextfactory.py:168:26: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/contextfactory.py:174:26: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/contextfactory.py:174:26: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.downloader.handlers\nscrapy/core/downloader/handlers/__init__.py:58:17: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/__init__.py:58:17: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.downloader.handlers.http10\nscrapy/core/downloader/handlers/http10.py:33:37: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/http10.py:33:37: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.downloader.handlers.s3\nscrapy/core/downloader/handlers/s3.py:53:24: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/s3.py:53:24: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module tests.test_downloader_handlers\ntests/test_downloader_handlers.py:831:22: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:831:22: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:861:17: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:861:17: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:888:12: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:888:12: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module tests.test_webclient\ntests/test_webclient.py:473:33: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter 
with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_webclient.py:485:33: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n\n-----------------------------------\nYour code has been rated at 9.99/10\n\npylint: exit 4 (53.83 seconds) /home/runner/work/scrapy/scrapy> pylint conftest.py docs extras scrapy setup.py tests pid=2037\n.pkg: _exit> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n pylint: FAIL code 4 (100.80=setup[46.97]+cmd[53.83] seconds)\n evaluation failed :( (100.88 seconds)\n##[error]Process completed with exit code 4.\n"}, {"step_name": "checks (3.8, typing)/4_Run check.txt", "log": "##[group]Run pip install -U tox\n\u001b[36;1mpip install -U tox\u001b[0m\n\u001b[36;1mtox\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n TOXENV: typing\n##[endgroup]\nCollecting tox\n Downloading tox-4.11.4-py3-none-any.whl.metadata (5.0 kB)\nCollecting cachetools>=5.3.1 (from tox)\n Downloading cachetools-5.3.2-py3-none-any.whl.metadata (5.2 kB)\nCollecting chardet>=5.2 (from tox)\n Downloading chardet-5.2.0-py3-none-any.whl.metadata (3.4 kB)\nCollecting colorama>=0.4.6 (from tox)\n Downloading colorama-0.4.6-py2.py3-none-any.whl (25 kB)\nCollecting filelock>=3.12.3 (from tox)\n Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting packaging>=23.1 (from tox)\n Downloading packaging-23.2-py3-none-any.whl.metadata (3.2 kB)\nCollecting platformdirs>=3.10 (from tox)\n Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nCollecting pluggy>=1.3 (from tox)\n Downloading pluggy-1.3.0-py3-none-any.whl.metadata (4.3 kB)\nCollecting pyproject-api>=1.6.1 (from tox)\n Downloading pyproject_api-1.6.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting tomli>=2.0.1 (from tox)\n Downloading tomli-2.0.1-py3-none-any.whl (12 kB)\nCollecting virtualenv>=20.24.3 (from tox)\n Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.24.3->tox)\n Downloading distlib-0.3.7-py2.py3-none-any.whl.metadata (5.1 kB)\nDownloading tox-4.11.4-py3-none-any.whl (153 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 154.0/154.0 kB 8.4 MB/s eta 0:00:00\nDownloading cachetools-5.3.2-py3-none-any.whl (9.3 kB)\nDownloading chardet-5.2.0-py3-none-any.whl (199 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 199.4/199.4 kB 32.2 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading packaging-23.2-py3-none-any.whl (53 kB)\n 
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 53.0/53.0 kB 17.1 MB/s eta 0:00:00\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nDownloading pluggy-1.3.0-py3-none-any.whl (18 kB)\nDownloading pyproject_api-1.6.1-py3-none-any.whl (12 kB)\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 82.9 MB/s eta 0:00:00\nDownloading distlib-0.3.7-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 79.6 MB/s eta 0:00:00\nInstalling collected packages: distlib, tomli, pluggy, platformdirs, packaging, filelock, colorama, chardet, cachetools, virtualenv, pyproject-api, tox\nSuccessfully installed cachetools-5.3.2 chardet-5.2.0 colorama-0.4.6 distlib-0.3.7 filelock-3.13.1 packaging-23.2 platformdirs-4.1.0 pluggy-1.3.0 pyproject-api-1.6.1 tomli-2.0.1 tox-4.11.4 virtualenv-20.25.0\n\n[notice] A new release of pip is available: 23.0.1 -> 23.3.1\n[notice] To update, run: pip install --upgrade pip\ntyping: install_deps> python -I -m pip install -ctests/upper-constraints.txt mypy==1.6.1 types-attrs==19.1.0 types-lxml==2023.10.21 types-Pillow==10.1.0.0 types-Pygments==2.16.0.0 types-pyOpenSSL==23.3.0.0 types-setuptools==68.2.0.0 typing-extensions==4.8.0 'w3lib>=2.1.2'\n.pkg: install_requires> python -I -m pip install 'setuptools>=40.8.0' wheel\n.pkg: _optional_hooks> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_sdist> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_wheel> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: install_requires_for_build_wheel> python -I -m pip install wheel\n.pkg: freeze> python -m pip freeze --all\n.pkg: pip==23.3.1,setuptools==69.0.2,wheel==0.42.0\n.pkg: prepare_metadata_for_build_wheel> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: build_sdist> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\ntyping: install_package_deps> python -I -m pip install -ctests/upper-constraints.txt 'PyDispatcher>=2.0.5; platform_python_implementation == \"CPython\"' 'PyPyDispatcher>=2.1.0; platform_python_implementation == \"PyPy\"' 'Twisted>=18.9.0' 'cryptography>=36.0.0' 'cssselect>=0.9.1' 'itemadapter>=0.1.0' 'itemloaders>=1.0.1' 'lxml>=4.4.1' packaging 'parsel>=1.5.0' 'protego>=0.1.15' 'pyOpenSSL>=21.0.0' 'queuelib>=1.4.2' 'service-identity>=18.1.0' setuptools tldextract 'w3lib>=1.17.0' 'zope.interface>=5.1.0'\ntyping: install_package> python -I -m pip install -ctests/upper-constraints.txt 
--force-reinstall --no-deps /home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz\ntyping: freeze> python -m pip freeze --all\ntyping: attrs==23.1.0,Automat==22.10.0,certifi==2023.11.17,cffi==1.16.0,charset-normalizer==3.3.2,constantly==23.10.4,cryptography==41.0.7,cssselect==1.2.0,filelock==3.13.1,hyperlink==21.0.0,idna==3.6,incremental==22.10.0,itemadapter==0.8.0,itemloaders==1.1.0,jmespath==1.0.1,lxml==4.9.3,mypy==1.6.1,mypy-extensions==1.0.0,packaging==23.2,parsel==1.8.1,pip==23.3.1,Protego==0.3.0,pyasn1==0.5.1,pyasn1-modules==0.3.0,pycparser==2.21,PyDispatcher==2.0.7,pyOpenSSL==23.3.0,queuelib==1.6.2,requests==2.31.0,requests-file==1.5.1,Scrapy @ file:///home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz#sha256=ebe501ee4a51f734f99dff82a0f62429c4051d3976e2377b4c07e9f5fb55d533,service-identity==23.1.0,setuptools==69.0.2,six==1.16.0,tldextract==5.1.1,tomli==2.0.1,Twisted==23.10.0,types-attrs==19.1.0,types-beautifulsoup4==4.12.0.7,types-docutils==0.20.0.3,types-html5lib==1.1.11.15,types-lxml==2023.10.21,types-Pillow==10.1.0.0,types-Pygments==2.16.0.0,types-pyOpenSSL==23.3.0.0,types-setuptools==68.2.0.0,typing_extensions==4.8.0,urllib3==2.1.0,w3lib==2.1.2,wheel==0.42.0,zope.interface==6.1\ntyping: commands[0]> mypy scrapy tests\nscrapy/core/downloader/handlers/__init__.py:58: error: Too few arguments for \"build_from_crawler\" [call-arg]\nscrapy/core/engine.py:361: error: Too few arguments for \"build_from_crawler\" [call-arg]\nscrapy/crawler.py:112: error: Too few arguments for \"build_from_crawler\" [call-arg]\nFound 3 errors in 3 files (checked 333 source files)\ntyping: exit 1 (8.99 seconds) /home/runner/work/scrapy/scrapy> mypy scrapy tests pid=1984\n.pkg: _exit> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n typing: FAIL code 1 (25.59=setup[16.60]+cmd[8.99] seconds)\n evaluation failed :( (25.71 seconds)\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/scrapy/core/downloader/contextfactory.py b/scrapy/core/downloader/contextfactory.py\nindex 9047a5b83..05d6e7a09 100644\n--- a/scrapy/core/downloader/contextfactory.py\n+++ b/scrapy/core/downloader/contextfactory.py\n@@ -166,14 +166,14 @@ def load_context_factory_from_settings(settings, crawler):\n # try method-aware context factory\n try:\n context_factory = build_from_crawler(\n- objcls=context_factory_cls,\n- crawler=crawler,\n+ context_factory_cls,\n+ crawler,\n method=ssl_method,\n )\n except TypeError:\n context_factory = build_from_crawler(\n- objcls=context_factory_cls,\n- crawler=crawler,\n+ context_factory_cls,\n+ crawler,\n )\n msg = (\n f\"{settings['DOWNLOADER_CLIENTCONTEXTFACTORY']} does not accept \"\ndiff --git a/scrapy/core/downloader/handlers/__init__.py b/scrapy/core/downloader/handlers/__init__.py\nindex 466393b80..416669b7f 100644\n--- a/scrapy/core/downloader/handlers/__init__.py\n+++ b/scrapy/core/downloader/handlers/__init__.py\n@@ -56,8 +56,8 @@ class DownloadHandlers:\n if skip_lazy and getattr(dhcls, \"lazy\", True):\n return None\n dh = build_from_crawler(\n- objcls=dhcls,\n- crawler=self._crawler,\n+ dhcls,\n+ self._crawler,\n )\n except NotConfigured as ex:\n self._notconfigured[scheme] = str(ex)\ndiff --git a/scrapy/core/downloader/handlers/http10.py b/scrapy/core/downloader/handlers/http10.py\nindex 817a420c0..b6ac7a251 100644\n--- a/scrapy/core/downloader/handlers/http10.py\n+++ b/scrapy/core/downloader/handlers/http10.py\n@@ -31,8 +31,8 @@ class 
HTTP10DownloadHandler:\n host, port = to_unicode(factory.host), factory.port\n if factory.scheme == b\"https\":\n client_context_factory = build_from_crawler(\n- objcls=self.ClientContextFactory,\n- crawler=self._crawler,\n+ self.ClientContextFactory,\n+ self._crawler,\n )\n return reactor.connectSSL(host, port, factory, client_context_factory)\n return reactor.connectTCP(host, port, factory)\ndiff --git a/scrapy/core/downloader/handlers/s3.py b/scrapy/core/downloader/handlers/s3.py\nindex a37f29aac..1f7533759 100644\n--- a/scrapy/core/downloader/handlers/s3.py\n+++ b/scrapy/core/downloader/handlers/s3.py\n@@ -51,8 +51,8 @@ class S3DownloadHandler:\n )\n \n _http_handler = build_from_crawler(\n- objcls=httpdownloadhandler,\n- crawler=crawler,\n+ httpdownloadhandler,\n+ crawler,\n )\n self._download_http = _http_handler.download_request\n \ndiff --git a/scrapy/core/engine.py b/scrapy/core/engine.py\nindex 281dc8a54..545cd401f 100644\n--- a/scrapy/core/engine.py\n+++ b/scrapy/core/engine.py\n@@ -358,7 +358,7 @@ class ExecutionEngine:\n raise RuntimeError(f\"No free spider slot when opening {spider.name!r}\")\n logger.info(\"Spider opened\", extra={\"spider\": spider})\n nextcall = CallLaterOnce(self._next_request)\n- scheduler = build_from_crawler(self.scheduler_cls, crawler=self.crawler)\n+ scheduler = build_from_crawler(self.scheduler_cls, self.crawler)\n start_requests = yield self.scraper.spidermw.process_start_requests(\n start_requests, spider\n )\ndiff --git a/scrapy/core/scheduler.py b/scrapy/core/scheduler.py\nindex bf027e1f9..f41b83a67 100644\n--- a/scrapy/core/scheduler.py\n+++ b/scrapy/core/scheduler.py\n@@ -324,7 +324,7 @@ class Scheduler(BaseScheduler):\n \"\"\"Create a new priority queue instance, with in-memory storage\"\"\"\n return build_from_crawler(\n self.pqclass,\n- crawler=self.crawler,\n+ self.crawler,\n downstream_queue_cls=self.mqclass,\n key=\"\",\n )\n@@ -335,7 +335,7 @@ class Scheduler(BaseScheduler):\n state = self._read_dqs_state(self.dqdir)\n q = build_from_crawler(\n self.pqclass,\n- crawler=self.crawler,\n+ self.crawler,\n downstream_queue_cls=self.dqclass,\n key=self.dqdir,\n startprios=state,\ndiff --git a/scrapy/crawler.py b/scrapy/crawler.py\nindex b7a6df2f3..844d5f759 100644\n--- a/scrapy/crawler.py\n+++ b/scrapy/crawler.py\n@@ -111,7 +111,7 @@ class Crawler:\n \n self.request_fingerprinter = build_from_crawler(\n load_object(self.settings[\"REQUEST_FINGERPRINTER_CLASS\"]),\n- crawler=self,\n+ self,\n )\n \n reactor_class: str = self.settings[\"TWISTED_REACTOR\"]\ndiff --git a/tests/test_downloader_handlers.py b/tests/test_downloader_handlers.py\nindex 37a53643b..dd07d33f1 100644\n--- a/tests/test_downloader_handlers.py\n+++ b/tests/test_downloader_handlers.py\n@@ -829,8 +829,8 @@ class S3AnonTestCase(unittest.TestCase):\n skip_if_no_boto()\n crawler = get_crawler()\n self.s3reqh = build_from_crawler(\n- objcls=S3DownloadHandler,\n- crawler=crawler,\n+ S3DownloadHandler,\n+ crawler,\n httpdownloadhandler=HttpDownloadHandlerMock,\n # anon=True, # implicit\n )\n@@ -859,8 +859,8 @@ class S3TestCase(unittest.TestCase):\n skip_if_no_boto()\n crawler = get_crawler()\n s3reqh = build_from_crawler(\n- objcls=S3DownloadHandler,\n- crawler=crawler,\n+ S3DownloadHandler,\n+ crawler,\n aws_access_key_id=self.AWS_ACCESS_KEY_ID,\n aws_secret_access_key=self.AWS_SECRET_ACCESS_KEY,\n httpdownloadhandler=HttpDownloadHandlerMock,\n@@ -886,8 +886,8 @@ class S3TestCase(unittest.TestCase):\n try:\n crawler = get_crawler()\n build_from_crawler(\n- 
objcls=S3DownloadHandler,\n- crawler=crawler,\n+ S3DownloadHandler,\n+ crawler,\n extra_kw=True,\n )\n except Exception as e:\ndiff --git a/tests/test_webclient.py b/tests/test_webclient.py\nindex a69d9c1b0..d4b6ba15b 100644\n--- a/tests/test_webclient.py\n+++ b/tests/test_webclient.py\n@@ -471,7 +471,7 @@ class WebClientCustomCiphersSSLTestCase(WebClientSSLTestCase):\n s = \"0123456789\" * 10\n settings = Settings({\"DOWNLOADER_CLIENT_TLS_CIPHERS\": self.custom_ciphers})\n client_context_factory = build_from_settings(\n- ScrapyClientContextFactory, settings=settings\n+ ScrapyClientContextFactory, settings\n )\n return getPage(\n self.getURL(\"payload\"), body=s, contextFactory=client_context_factory\n@@ -483,7 +483,7 @@ class WebClientCustomCiphersSSLTestCase(WebClientSSLTestCase):\n {\"DOWNLOADER_CLIENT_TLS_CIPHERS\": \"ECDHE-RSA-AES256-GCM-SHA384\"}\n )\n client_context_factory = build_from_settings(\n- ScrapyClientContextFactory, settings=settings\n+ ScrapyClientContextFactory, settings\n )\n d = getPage(\n self.getURL(\"payload\"), body=s, contextFactory=client_context_factory\n", "difficulty": 2, "changed_files": ["scrapy/core/downloader/contextfactory.py", "scrapy/core/downloader/handlers/__init__.py", "scrapy/core/downloader/handlers/http10.py", "scrapy/core/downloader/handlers/s3.py", "scrapy/core/engine.py", "scrapy/core/scheduler.py", "scrapy/crawler.py", "tests/test_downloader_handlers.py", "tests/test_webclient.py"], "commit_link": "https://github.com/scrapy/scrapy/tree/1168b9244d680437e2a2683294a55c0b52c118f6"}
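The scrapy datapoint above is resolved by switching every build_from_crawler call site from keyword arguments (objcls=..., crawler=...) to positional ones. A minimal sketch of how such a mypy failure arises, assuming (this is an illustration, not Scrapy's actual source) that the helper declares its leading parameters as positional-only:

    from typing import Any, Type, TypeVar

    T = TypeVar("T")

    def build_from_crawler(objcls: Type[T], crawler: Any, /, *args: Any, **kwargs: Any) -> T:
        # The '/' marker makes objcls and crawler positional-only. A keyword
        # spelling of either name is swallowed by **kwargs, so the required
        # positional slots stay unfilled and mypy reports the call as short.
        return objcls(*args, crawler=crawler, **kwargs)

    class Scheduler:
        def __init__(self, crawler: Any) -> None:
            self.crawler = crawler

    build_from_crawler(Scheduler, object())        # OK: the form the diff adopts
    # build_from_crawler(objcls=Scheduler, crawler=object())
    #   mypy: Too few arguments for "build_from_crawler"  [call-arg]

Under that assumption the error class in the logs ("Too few arguments for build_from_crawler") follows directly, and the fix is purely mechanical at each call site.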
data/python/16a0c04.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 70, "repo_owner": "dask", "repo_name": "dask", "head_branch": "milesgranger/10801-more-deprecations", "workflow_name": "Linting", "workflow_filename": "pre-commit.yml", "workflow_path": ".github/workflows/pre-commit.yml", "contributor": "milesgranger", "sha_fail": "16a0c04d06205527ec5e379df2596b399ee5dadc", "sha_success": "3716f88d477a969d0910c75b4268949af11c0e6e", "workflow": "name: Linting\n\non:\n push:\n branches: main\n pull_request:\n branches: main\n\njobs:\n checks:\n name: pre-commit hooks\n runs-on: ubuntu-latest\n steps:\n - uses: actions/[email protected]\n - uses: actions/setup-python@v5\n with:\n python-version: '3.9'\n - uses: pre-commit/[email protected]\n", "logs": [{"step_name": "pre-commit hooks/4_Run [email protected]", "log": "##[group]Run pre-commit/[email protected]\nwith:\n extra_args: --all-files\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\n##[group]Run python -m pip install pre-commit\n\u001b[36;1mpython -m pip install pre-commit\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\nCollecting pre-commit\n Downloading pre_commit-3.6.0-py2.py3-none-any.whl.metadata (1.3 kB)\nCollecting cfgv>=2.0.0 (from pre-commit)\n Downloading cfgv-3.4.0-py2.py3-none-any.whl.metadata (8.5 kB)\nCollecting identify>=1.0.0 (from pre-commit)\n Downloading identify-2.5.33-py2.py3-none-any.whl.metadata (4.4 kB)\nCollecting nodeenv>=0.11.1 (from pre-commit)\n Downloading nodeenv-1.8.0-py2.py3-none-any.whl.metadata (21 kB)\nCollecting pyyaml>=5.1 (from pre-commit)\n Downloading PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.1 kB)\nCollecting virtualenv>=20.10.0 (from pre-commit)\n Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nRequirement already satisfied: setuptools in /opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages (from nodeenv>=0.11.1->pre-commit) (58.1.0)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.10.0->pre-commit)\n Downloading distlib-0.3.8-py2.py3-none-any.whl.metadata (5.1 kB)\nCollecting filelock<4,>=3.12.2 (from virtualenv>=20.10.0->pre-commit)\n Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting platformdirs<5,>=3.9.1 (from virtualenv>=20.10.0->pre-commit)\n Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nDownloading pre_commit-3.6.0-py2.py3-none-any.whl (204 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 204.0/204.0 kB 19.7 MB/s eta 0:00:00\nDownloading cfgv-3.4.0-py2.py3-none-any.whl (7.2 kB)\nDownloading identify-2.5.33-py2.py3-none-any.whl (98 kB)\n 
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 98.9/98.9 kB 27.0 MB/s eta 0:00:00\nDownloading nodeenv-1.8.0-py2.py3-none-any.whl (22 kB)\nDownloading PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (738 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 738.9/738.9 kB 80.2 MB/s eta 0:00:00\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 114.1 MB/s eta 0:00:00\nDownloading distlib-0.3.8-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 78.9 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nInstalling collected packages: distlib, pyyaml, platformdirs, nodeenv, identify, filelock, cfgv, virtualenv, pre-commit\nSuccessfully installed cfgv-3.4.0 distlib-0.3.8 filelock-3.13.1 identify-2.5.33 nodeenv-1.8.0 platformdirs-4.1.0 pre-commit-3.6.0 pyyaml-6.0.1 virtualenv-20.25.0\n\n[notice] A new release of pip is available: 23.0.1 -> 23.3.2\n[notice] To update, run: pip install --upgrade pip\n##[group]Run python -m pip freeze --local\n\u001b[36;1mpython -m pip freeze --local\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\ncfgv==3.4.0\ndistlib==0.3.8\nfilelock==3.13.1\nidentify==2.5.33\nnodeenv==1.8.0\nplatformdirs==4.1.0\npre-commit==3.6.0\nPyYAML==6.0.1\nvirtualenv==20.25.0\n##[group]Run actions/cache@v3\nwith:\n path: ~/.cache/pre-commit\n key: pre-commit-3|/opt/hostedtoolcache/Python/3.9.18/x64|2f6023be5c7e4f75d9ed9a00c3fcb4c3fa7d8493a70885078264f9accc27f632\n enableCrossOsArchive: false\n fail-on-cache-miss: false\n lookup-only: false\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\nReceived 0 of 69094681 (0.0%), 0.0 MBs/sec\nCache Size: ~66 MB (69094681 B)\n[command]/usr/bin/tar -xf /home/runner/work/_temp/2e80fd5c-4883-4bdb-be22-ed0a545b7db7/cache.tzst -P -C /home/runner/work/dask/dask --use-compress-program unzstd\nReceived 69094681 of 69094681 (100.0%), 32.9 MBs/sec\nCache restored 
successfully\nCache restored from key: pre-commit-3|/opt/hostedtoolcache/Python/3.9.18/x64|2f6023be5c7e4f75d9ed9a00c3fcb4c3fa7d8493a70885078264f9accc27f632\n##[group]Run pre-commit run --show-diff-on-failure --color=always --all-files\n\u001b[36;1mpre-commit run --show-diff-on-failure --color=always --all-files\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\nfix end of files.........................................................\u001b[42mPassed\u001b[m\ndebug statements (python)................................................\u001b[42mPassed\u001b[m\nabsolufy-imports.........................................................\u001b[42mPassed\u001b[m\nisort....................................................................\u001b[42mPassed\u001b[m\npyupgrade................................................................\u001b[42mPassed\u001b[m\nblack....................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: black\u001b[m\n\u001b[2m- files were modified by this hook\u001b[m\n\n\u001b[1mreformatted dask/utils.py\u001b[0m\n\n\u001b[1mAll done! \u2728 \ud83c\udf70 \u2728\u001b[0m\n\u001b[34m\u001b[1m1 file \u001b[0m\u001b[1mreformatted\u001b[0m, \u001b[34m257 files \u001b[0mleft unchanged.\n\nflake8...................................................................\u001b[42mPassed\u001b[m\ncodespell................................................................\u001b[42mPassed\u001b[m\nmypy.....................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: mypy\u001b[m\n\u001b[2m- exit code: 1\u001b[m\n\ndask/utils.py:231: error: Unsupported operand types for + (\"str\" and \"None\") [operator]\ndask/utils.py:231: note: Right operand is of type \"Optional[str]\"\ndask/utils.py:257: error: Unsupported operand types for + (\"str\" and \"None\") [operator]\ndask/utils.py:257: note: Right operand is of type \"Optional[str]\"\nFound 2 errors in 1 file (checked 258 source files)\n\npre-commit hook(s) made changes.\nIf you are seeing this message in CI, reproduce locally with: `pre-commit run --all-files`.\nTo run `pre-commit` as part of git workflow, use `pre-commit install`.\nAll changes made by hooks:\n\u001b[1mdiff --git a/dask/utils.py b/dask/utils.py\u001b[m\n\u001b[1mindex 80ea6e8..26f1176 100644\u001b[m\n\u001b[1m--- a/dask/utils.py\u001b[m\n\u001b[1m+++ b/dask/utils.py\u001b[m\n\u001b[36m@@ -148,7 +148,7 @@\u001b[m \u001b[mdef _deprecated_kwarg(\u001b[m\n new_arg_name: str | None = None,\u001b[m\n mapping: Mapping[Any, Any] | Callable[[Any], Any] | None = None,\u001b[m\n stacklevel: int = 2,\u001b[m\n\u001b[31m- comment: str | None = None\u001b[m\n\u001b[32m+\u001b[m\u001b[32m comment: str | None = None,\u001b[m\n ) -> Callable[[F], F]:\u001b[m\n \"\"\"\u001b[m\n Decorator to deprecate a keyword argument of a function.\u001b[m\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/dask/utils.py b/dask/utils.py\nindex 80ea6e8d6..0da2953c8 100644\n--- a/dask/utils.py\n+++ b/dask/utils.py\n@@ -148,7 +148,7 @@ def _deprecated_kwarg(\n new_arg_name: str | None = None,\n 
mapping: Mapping[Any, Any] | Callable[[Any], Any] | None = None,\n stacklevel: int = 2,\n- comment: str | None = None\n+ comment: str | None = None,\n ) -> Callable[[F], F]:\n \"\"\"\n Decorator to deprecate a keyword argument of a function.\n@@ -219,7 +219,7 @@ def _deprecated_kwarg(\n \"mapping from old to new argument values must be dict or callable!\"\n )\n \n- comment = f\"\\n{comment}\" or \"\"\n+ comment_ = f\"\\n{comment}\" or \"\"\n \n def _deprecated_kwarg(func: F) -> F:\n @wraps(func)\n@@ -232,7 +232,7 @@ def _deprecated_kwarg(\n f\"the {repr(old_arg_name)} keyword is deprecated and \"\n \"will be removed in a future version. Please take \"\n f\"steps to stop the use of {repr(old_arg_name)}\"\n- ) + comment\n+ ) + comment_\n warnings.warn(msg, FutureWarning, stacklevel=stacklevel)\n kwargs[old_arg_name] = old_arg_value\n return func(*args, **kwargs)\n@@ -254,7 +254,7 @@ def _deprecated_kwarg(\n f\"use {repr(new_arg_name)} instead.\"\n )\n \n- warnings.warn(msg + comment, FutureWarning, stacklevel=stacklevel)\n+ warnings.warn(msg + comment_, FutureWarning, stacklevel=stacklevel)\n if kwargs.get(new_arg_name) is not None:\n msg = (\n f\"Can only specify {repr(old_arg_name)} \"\n", "difficulty": 0, "changed_files": ["dask/utils.py"], "commit_link": "https://github.com/dask/dask/tree/16a0c04d06205527ec5e379df2596b399ee5dadc"}
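The dask datapoint above bundles two fixes: black wants a trailing comma after the last keyword parameter, and mypy rejects `msg + comment` because `comment` is declared `str | None`. Reassigning the parameter does not help here: for variables captured by nested functions, mypy falls back to the declared type, since the closure may run before or after any given reassignment. Binding the result to a fresh name (comment_ in the diff) yields a variable whose inferred type is plainly str. A tiny reproduction, illustrative rather than the dask source:

    from typing import Callable, Optional

    def deprecated_kwarg(
        comment: Optional[str] = None,
    ) -> Callable[[Callable[[], None]], Callable[[], None]]:
        # Fresh name: inferred as str, safe to concatenate inside the closure.
        # Rebinding `comment` itself would leave it Optional[str] in `inner`.
        comment_ = f"\n{comment}" or ""

        def decorator(func: Callable[[], None]) -> Callable[[], None]:
            def inner() -> None:
                print("keyword is deprecated" + comment_)
                func()
            return inner

        return decorator

(The `or ""` is kept verbatim from the datapoint even though an f-string containing "\n" is always truthy, so the fallback never fires.)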
data/python/1afe2c9.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 2, "repo_owner": "encode", "repo_name": "httpx", "head_branch": "add-ssl-context-argument", "workflow_name": "Test Suite", "workflow_filename": "test-suite.yml", "workflow_path": ".github/workflows/test-suite.yml", "contributor": "karpetrosyan", "sha_fail": "1afe2c9cb192d3760d59190cc7892e7ac37d5e27", "sha_success": "6be802e03bdb78d20f9c8df63a43f9167dcbfd49", "workflow": "---\nname: Test Suite\n\non:\n push:\n branches: [\"master\"]\n pull_request:\n branches: [\"master\"]\n\njobs:\n tests:\n name: \"Python ${{ matrix.python-version }}\"\n runs-on: \"ubuntu-latest\"\n\n strategy:\n matrix:\n python-version: [\"3.8\", \"3.9\", \"3.10\", \"3.11\", \"3.12\"]\n\n steps:\n - uses: \"actions/checkout@v4\"\n - uses: \"actions/setup-python@v4\"\n with:\n python-version: \"${{ matrix.python-version }}\"\n allow-prereleases: true\n - name: \"Install dependencies\"\n run: \"scripts/install\"\n - name: \"Run linting checks\"\n run: \"scripts/check\"\n - name: \"Build package & docs\"\n run: \"scripts/build\"\n - name: \"Run tests\"\n run: \"scripts/test\"\n - name: \"Enforce coverage\"\n run: \"scripts/coverage\"\n", "logs": [{"step_name": "Python 3.10/5_Run linting checks.txt", "log": "##[group]Run scripts/check\n\u001b[36;1mscripts/check\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.10.13/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib\n##[endgroup]\n+ ./scripts/sync-version\nCHANGELOG_VERSION: 0.26.0\nVERSION: 0.26.0\n+ ruff format httpx tests --diff\n60 files left unchanged\n+ mypy httpx tests\nhttpx/_client.py:1456: error: Unexpected keyword argument \"verify\" for \"AsyncHTTPTransport\" [call-arg]\nhttpx/_client.py:1458: error: Name \"verify\" is not defined [name-defined]\nhttpx/_client.py:1459: error: Name \"cert\" is not defined [name-defined]\nhttpx/_transports/default.py:260: note: \"AsyncHTTPTransport\" defined here\nhttpx/_client.py:1456: error: Unexpected keyword argument \"cert\" for \"AsyncHTTPTransport\" [call-arg]\nhttpx/_transports/default.py:260: note: \"AsyncHTTPTransport\" defined here\nFound 4 errors in 1 file (checked 60 source files)\n##[error]Process completed with exit code 1.\n"}, {"step_name": "Python 3.11/5_Run linting checks.txt", "log": "##[group]Run scripts/check\n\u001b[36;1mscripts/check\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.11.7/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib\n##[endgroup]\n+ ./scripts/sync-version\nCHANGELOG_VERSION: 0.26.0\nVERSION: 0.26.0\n+ ruff format httpx tests --diff\n60 files left unchanged\n+ mypy httpx tests\nhttpx/_client.py:1456: error: Unexpected keyword argument \"verify\" for \"AsyncHTTPTransport\" [call-arg]\nhttpx/_client.py:1458: error: Name \"verify\" is not defined [name-defined]\nhttpx/_client.py:1459: error: Name \"cert\" is not defined [name-defined]\nhttpx/_transports/default.py:260: note: \"AsyncHTTPTransport\" defined here\nhttpx/_client.py:1456: error: Unexpected 
keyword argument \"cert\" for \"AsyncHTTPTransport\" [call-arg]\nhttpx/_transports/default.py:260: note: \"AsyncHTTPTransport\" defined here\nFound 4 errors in 1 file (checked 60 source files)\n##[error]Process completed with exit code 1.\n"}, {"step_name": "Python 3.12/5_Run linting checks.txt", "log": "##[group]Run scripts/check\n\u001b[36;1mscripts/check\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.1/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib\n##[endgroup]\n+ ./scripts/sync-version\nCHANGELOG_VERSION: 0.26.0\nVERSION: 0.26.0\n+ ruff format httpx tests --diff\n60 files left unchanged\n+ mypy httpx tests\nhttpx/_client.py:1456: error: Unexpected keyword argument \"verify\" for \"AsyncHTTPTransport\" [call-arg]\nhttpx/_client.py:1458: error: Name \"verify\" is not defined [name-defined]\nhttpx/_client.py:1459: error: Name \"cert\" is not defined [name-defined]\nhttpx/_transports/default.py:260: note: \"AsyncHTTPTransport\" defined here\nhttpx/_client.py:1456: error: Unexpected keyword argument \"cert\" for \"AsyncHTTPTransport\" [call-arg]\nhttpx/_transports/default.py:260: note: \"AsyncHTTPTransport\" defined here\nFound 4 errors in 1 file (checked 60 source files)\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/httpx/_client.py b/httpx/_client.py\nindex e5cff5d..4dd2d07 100644\n--- a/httpx/_client.py\n+++ b/httpx/_client.py\n@@ -1455,8 +1455,6 @@ class AsyncClient(BaseClient):\n ) -> AsyncBaseTransport:\n return AsyncHTTPTransport(\n ssl_context=ssl_context,\n- verify=verify,\n- cert=cert,\n http1=http1,\n http2=http2,\n limits=limits,\n", "difficulty": 2, "changed_files": ["httpx/_client.py"], "commit_link": "https://github.com/encode/httpx/tree/1afe2c9cb192d3760d59190cc7892e7ac37d5e27"}
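The httpx datapoint above is the inverse case: on that branch AsyncHTTPTransport accepts a single ssl_context parameter, so the stale verify= and cert= keywords in AsyncClient had to be deleted rather than passed through. For readers mapping the old flags onto the new style, the standard-library equivalents look roughly like this (illustrative; "client.pem" is a placeholder path, not a file from the datapoint):

    import ssl

    # Analogue of verify=True: hostname checks and the system CA bundle on.
    ssl_context = ssl.create_default_context()

    # ssl_context.load_cert_chain("client.pem")   # analogue of cert=...
    # ssl_context.check_hostname = False          # these two lines together
    # ssl_context.verify_mode = ssl.CERT_NONE     # are the verify=False analogue

Collapsing several TLS flags into one prebuilt context is what makes the removed keywords "unexpected" to mypy in the failing logs.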
data/python/2201be2.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 54, "repo_owner": "mwaskom", "repo_name": "seaborn", "head_branch": "plot/layout_rect", "workflow_name": "CI", "workflow_filename": "ci.yaml", "workflow_path": ".github/workflows/ci.yaml", "contributor": "mwaskom", "sha_fail": "2201be21886bb82201f3c3487f5f1468f6e6ac81", "sha_success": "596ad4f85f31f39bcedd2fbca935647b1a2fdf84", "workflow": "name: CI\n\non:\n push:\n branches: [master, v0.*]\n pull_request:\n branches: master\n schedule:\n - cron: '0 6 * * 1,4' # Each Monday and Thursday at 06:00 UTC\n workflow_dispatch:\n\npermissions:\n contents: read\n\nenv:\n NB_KERNEL: python\n MPLBACKEND: Agg\n SEABORN_DATA: ${{ github.workspace }}/seaborn-data\n\njobs:\n build-docs:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1\n\n - name: Setup Python 3.11\n uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1\n with:\n python-version: \"3.11\"\n\n - name: Install seaborn\n run: |\n pip install --upgrade pip\n pip install .[stats,docs]\n\n - name: Install pandoc\n run: |\n sudo apt-get install pandoc\n\n - name: Cache datasets\n run: |\n git clone https://github.com/mwaskom/seaborn-data.git\n ls $SEABORN_DATA\n\n - name: Build docs\n env:\n SPHINXOPTS: -j `nproc`\n run: |\n cd doc\n make -j `nproc` notebooks\n make html\n\n\n run-tests:\n runs-on: ubuntu-latest\n\n strategy:\n matrix:\n python: [\"3.8\", \"3.9\", \"3.10\", \"3.11\", \"3.12\"]\n install: [full]\n deps: [latest]\n\n include:\n - python: \"3.8\"\n install: full\n deps: pinned\n - python: \"3.11\"\n install: light\n deps: latest\n\n steps:\n - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1\n\n - name: Setup Python ${{ matrix.python }}\n uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1\n with:\n python-version: ${{ matrix.python }}\n allow-prereleases: true\n\n - name: Install seaborn\n run: |\n pip install --upgrade pip wheel\n if [[ ${{matrix.install}} == 'full' ]]; then EXTRAS=',stats'; fi\n if [[ ${{matrix.deps }} == 'pinned' ]]; then DEPS='-r ci/deps_pinned.txt'; fi\n pip install .[dev$EXTRAS] $DEPS\n\n - name: Run tests\n run: make test\n\n - name: Upload coverage\n uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # v3.1.4\n if: ${{ success() }}\n\n lint:\n runs-on: ubuntu-latest\n strategy:\n fail-fast: false\n steps:\n\n - name: Checkout\n uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1\n\n - name: Setup Python\n uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4.7.1\n\n - name: Install tools\n run: pip install mypy flake8\n\n - name: Flake8\n run: make lint\n\n - name: Type checking\n run: make typecheck\n", "logs": [{"step_name": "build-docs/7_Build docs.txt", "log": "##[group]Run cd doc\n\u001b[36;1mcd doc\u001b[0m\n\u001b[36;1mmake -j `nproc` notebooks\u001b[0m\n\u001b[36;1mmake html\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n NB_KERNEL: python\n MPLBACKEND: Agg\n SEABORN_DATA: /home/runner/work/seaborn/seaborn/seaborn-data\n pythonLocation: /opt/hostedtoolcache/Python/3.11.7/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib\n SPHINXOPTS: -j `nproc`\n##[endgroup]\nmake[1]: Entering directory 
'/home/runner/work/seaborn/seaborn/doc/_tutorial'\n../tools/nb_to_doc.py aesthetics.ipynb ../tutorial\n../tools/nb_to_doc.py axis_grids.ipynb ../tutorial\nmake[1]: Entering directory '/home/runner/work/seaborn/seaborn/doc/_docstrings'\n../tools/nb_to_doc.py FacetGrid.ipynb ../docstrings\n../tools/nb_to_doc.py categorical.ipynb ../tutorial\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py JointGrid.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. 
Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/utils/pandoc.py:51: RuntimeWarning: You are using an unsupported version of pandoc (2.9.2.1).\nYour version must be at least (2.14.2) but less than (4.0.0).\nRefer to https://pandoc.org/installing.html.\nContinuing with doubts...\n check_pandoc_version()\n../tools/nb_to_doc.py PairGrid.ipynb ../docstrings\n0.01s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/utils/pandoc.py:51: RuntimeWarning: You are using an unsupported version of pandoc (2.9.2.1).\nYour version must be at least (2.14.2) but less than (4.0.0).\nRefer to https://pandoc.org/installing.html.\nContinuing with doubts...\n check_pandoc_version()\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py color_palettes.ipynb ../tutorial\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.01s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/utils/pandoc.py:51: RuntimeWarning: You are using an unsupported version of pandoc (2.9.2.1).\nYour version must be at least (2.14.2) but less than (4.0.0).\nRefer to https://pandoc.org/installing.html.\nContinuing with doubts...\n check_pandoc_version()\n../tools/nb_to_doc.py axes_style.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py data_structure.ipynb ../tutorial\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. 
Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/utils/pandoc.py:51: RuntimeWarning: You are using an unsupported version of pandoc (2.9.2.1).\nYour version must be at least (2.14.2) but less than (4.0.0).\nRefer to https://pandoc.org/installing.html.\nContinuing with doubts...\n check_pandoc_version()\n../tools/nb_to_doc.py barplot.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.01s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py blend_palette.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py distributions.ipynb ../tutorial\n../tools/nb_to_doc.py boxenplot.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. 
Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py error_bars.ipynb ../tutorial\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py boxplot.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py function_overview.ipynb ../tutorial\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/utils/pandoc.py:51: RuntimeWarning: You are using an unsupported version of pandoc (2.9.2.1).\nYour version must be at least (2.14.2) but less than (4.0.0).\nRefer to https://pandoc.org/installing.html.\nContinuing with doubts...\n check_pandoc_version()\n../tools/nb_to_doc.py catplot.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py clustermap.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. 
Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/utils/pandoc.py:51: RuntimeWarning: You are using an unsupported version of pandoc (2.9.2.1).\nYour version must be at least (2.14.2) but less than (4.0.0).\nRefer to https://pandoc.org/installing.html.\nContinuing with doubts...\n check_pandoc_version()\n../tools/nb_to_doc.py color_palette.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.01s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.01s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py introduction.ipynb ../tutorial\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py countplot.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py cubehelix_palette.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. 
Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py dark_palette.ipynb ../docstrings\n../tools/nb_to_doc.py objects_interface.ipynb ../tutorial\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py displot.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py diverging_palette.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.01s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py ecdfplot.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. 
Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/utils/pandoc.py:51: RuntimeWarning: You are using an unsupported version of pandoc (2.9.2.1).\nYour version must be at least (2.14.2) but less than (4.0.0).\nRefer to https://pandoc.org/installing.html.\nContinuing with doubts...\n check_pandoc_version()\n../tools/nb_to_doc.py heatmap.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/utils/pandoc.py:51: RuntimeWarning: You are using an unsupported version of pandoc (2.9.2.1).\nYour version must be at least (2.14.2) but less than (4.0.0).\nRefer to https://pandoc.org/installing.html.\nContinuing with doubts...\n check_pandoc_version()\n../tools/nb_to_doc.py histplot.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.01s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py hls_palette.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py properties.ipynb ../tutorial\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. 
Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py husl_palette.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.01s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n../tools/nb_to_doc.py jointplot.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/utils/pandoc.py:51: RuntimeWarning: You are using an unsupported version of pandoc (2.9.2.1).\nYour version must be at least (2.14.2) but less than (4.0.0).\nRefer to https://pandoc.org/installing.html.\nContinuing with doubts...\n check_pandoc_version()\n../tools/nb_to_doc.py regression.ipynb ../tutorial\n../tools/nb_to_doc.py kdeplot.ipynb ../docstrings\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. 
Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/utils/pandoc.py:51: RuntimeWarning: You are using an unsupported version of pandoc (2.9.2.1).\nYour version must be at least (2.14.2) but less than (4.0.0).\nRefer to https://pandoc.org/installing.html.\nContinuing with doubts...\n check_pandoc_version()\n../tools/nb_to_doc.py relational.ipynb ../tutorial\n../tools/nb_to_doc.py light_palette.ipynb ../docstrings\n../tools/nb_to_doc.py lineplot.ipynb ../docstrings\n../tools/nb_to_doc.py lmplot.ipynb ../docstrings\n../tools/nb_to_doc.py move_legend.ipynb ../docstrings\n../tools/nb_to_doc.py mpl_palette.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Agg.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Area.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Band.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Bar.ipynb ../docstrings\nmake[1]: Leaving directory '/home/runner/work/seaborn/seaborn/doc/_tutorial'\n
../tools/nb_to_doc.py objects.Bars.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Count.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Dash.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Dodge.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Dot.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Dots.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Est.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Hist.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Jitter.ipynb ../docstrings\n../tools/nb_to_doc.py objects.KDE.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Line.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Lines.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Norm.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Path.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Paths.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Perc.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Plot.add.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Plot.config.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Plot.facet.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Plot.label.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Plot.layout.ipynb ../docstrings\n../tools/nb_to_doc.py objects.Plot.limit.ipynb ../docstrings\n
0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. 
Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/utils/pandoc.py:51: RuntimeWarning: You are using an unsupported version of pandoc (2.9.2.1).\nYour version must be at least (2.14.2) but less than (4.0.0).\nRefer to https://pandoc.org/installing.html.\nContinuing with doubts...\n check_pandoc_version()\n../tools/nb_to_doc.py objects.Plot.on.ipynb ../docstrings\nTraceback (most recent call last):\n File \"/home/runner/work/seaborn/seaborn/doc/_docstrings/../tools/nb_to_doc.py\", line 126, in <module>\n ep.preprocess(nb, {\"metadata\": {\"path\": basedir}})\n File \"/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/preprocessors/execute.py\", line 102, in preprocess\n self.preprocess_cell(cell, resources, index)\n File \"/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/preprocessors/execute.py\", line 123, in preprocess_cell\n cell = self.execute_cell(cell, index, store_history=True)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/jupyter_core/utils/__init__.py\", line 173, in wrapped\n return loop.run_until_complete(inner)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/asyncio/base_events.py\", line 653, in run_until_complete\n return future.result()\n ^^^^^^^^^^^^^^^\n File \"/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbclient/client.py\", line 1062, in async_execute_cell\n await self._check_raise_for_error(cell, cell_index, exec_reply)\n File \"/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbclient/client.py\", line 918, in _check_raise_for_error\n raise CellExecutionError.from_cell_and_msg(cell, exec_reply_content)\nnbclient.exceptions.CellExecutionError: An error occurred while executing the following cell:\n------------------\np.layout(extent=[0, 0, .8, 1]).show()\n------------------\n\n\n\u001b[0;31m---------------------------------------------------------------------------\u001b[0m\n\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)\nCell \u001b[0;32mIn[5], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mp\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlayout\u001b[49m\u001b[43m(\u001b[49m\u001b[43mextent\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m.8\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mshow\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\nFile \u001b[0;32m/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/seaborn/_core/plot.py:930\u001b[0m, in \u001b[0;36mPlot.show\u001b[0;34m(self, **kwargs)\u001b[0m\n\u001b[1;32m 913\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 914\u001b[0m \u001b[38;5;124;03mCompile the plot and display it by hooking into 
pyplot.\u001b[39;00m\n\u001b[1;32m 915\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 922\u001b[0m \n\u001b[1;32m 923\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 924\u001b[0m \u001b[38;5;66;03m# TODO make pyplot configurable at the class level, and when not using,\u001b[39;00m\n\u001b[1;32m 925\u001b[0m \u001b[38;5;66;03m# import IPython.display and call on self to populate cell output?\u001b[39;00m\n\u001b[1;32m 926\u001b[0m \n\u001b[1;32m 927\u001b[0m \u001b[38;5;66;03m# Keep an eye on whether matplotlib implements \"attaching\" an existing\u001b[39;00m\n\u001b[1;32m 928\u001b[0m \u001b[38;5;66;03m# figure to pyplot: https://github.com/matplotlib/matplotlib/pull/14024\u001b[39;00m\n\u001b[0;32m--> 930\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mplot\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpyplot\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39mshow(\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\nFile \u001b[0;32m/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/seaborn/_core/plot.py:937\u001b[0m, in \u001b[0;36mPlot.plot\u001b[0;34m(self, pyplot)\u001b[0m\n\u001b[1;32m 933\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 934\u001b[0m \u001b[38;5;124;03mCompile the plot spec and return the Plotter object.\u001b[39;00m\n\u001b[1;32m 935\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 936\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m theme_context(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_theme_with_defaults()):\n\u001b[0;32m--> 937\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_plot\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpyplot\u001b[49m\u001b[43m)\u001b[49m\n\nFile \u001b[0;32m/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/seaborn/_core/plot.py:971\u001b[0m, in \u001b[0;36mPlot._plot\u001b[0;34m(self, pyplot)\u001b[0m\n\u001b[1;32m 969\u001b[0m \u001b[38;5;66;03m# Add various figure decorations\u001b[39;00m\n\u001b[1;32m 970\u001b[0m plotter\u001b[38;5;241m.\u001b[39m_make_legend(\u001b[38;5;28mself\u001b[39m)\n\u001b[0;32m--> 971\u001b[0m \u001b[43mplotter\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_finalize_figure\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 973\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m plotter\n\nFile \u001b[0;32m/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/seaborn/_core/plot.py:1815\u001b[0m, in \u001b[0;36mPlotter._finalize_figure\u001b[0;34m(self, p)\u001b[0m\n\u001b[1;32m 1812\u001b[0m set_layout_engine(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_figure, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtight\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 1814\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (extent \u001b[38;5;241m:=\u001b[39m p\u001b[38;5;241m.\u001b[39m_layout_spec\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mextent\u001b[39m\u001b[38;5;124m\"\u001b[39m)) \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m-> 1815\u001b[0m engine \u001b[38;5;241m=\u001b[39m \u001b[43mget_layout_engine\u001b[49m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_figure)\n\u001b[1;32m 1816\u001b[0m 
\u001b[38;5;28;01mif\u001b[39;00m engine \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 1817\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_figure\u001b[38;5;241m.\u001b[39msubplots_adjust(\u001b[38;5;241m*\u001b[39mextent)\n\n\u001b[0;31mNameError\u001b[0m: name 'get_layout_engine' is not defined\n\nmake[1]: *** [Makefile:7: ../docstrings/objects.Plot.layout.rst] Error 1\nmake[1]: *** Waiting for unfinished jobs....\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/utils/pandoc.py:51: RuntimeWarning: You are using an unsupported version of pandoc (2.9.2.1).\nYour version must be at least (2.14.2) but less than (4.0.0).\nRefer to https://pandoc.org/installing.html.\nContinuing with doubts...\n check_pandoc_version()\n0.00s - Debugger warning: It seems that frozen modules are being used, which may\n0.00s - make the debugger miss breakpoints. Please pass -Xfrozen_modules=off\n0.00s - to python to disable frozen modules.\n0.00s - Note: Debugging will proceed. Set PYDEVD_DISABLE_FILE_VALIDATION=1 to disable this validation.\n/opt/hostedtoolcache/Python/3.11.7/x64/lib/python3.11/site-packages/nbconvert/utils/pandoc.py:51: RuntimeWarning: You are using an unsupported version of pandoc (2.9.2.1).\nYour version must be at least (2.14.2) but less than (4.0.0).\nRefer to https://pandoc.org/installing.html.\nContinuing with doubts...\n check_pandoc_version()\nmake[1]: Leaving directory '/home/runner/work/seaborn/seaborn/doc/_docstrings'\nmake: *** [Makefile:60: docstrings] Error 2\n##[error]Process completed with exit code 2.\n"}, {"step_name": "lint/5_Flake8.txt", "log": "##[group]Run make lint\n\u001b[36;1mmake lint\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n NB_KERNEL: python\n MPLBACKEND: Agg\n SEABORN_DATA: /home/runner/work/seaborn/seaborn/seaborn-data\n##[endgroup]\nflake8 seaborn/ tests/\nseaborn/_core/plot.py:1815:22: F821 undefined name 'get_layout_engine'\nmake: *** [Makefile:7: lint] Error 1\n##[error]Process completed with exit code 2.\n"}, {"step_name": "run-tests (3.11, light, latest)/5_Run tests.txt", "log": "##[group]Run make test\n\u001b[36;1mmake test\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n NB_KERNEL: python\n MPLBACKEND: Agg\n SEABORN_DATA: /home/runner/work/seaborn/seaborn/seaborn-data\n pythonLocation: /opt/hostedtoolcache/Python/3.11.7/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib\n##[endgroup]\npytest -n auto --cov=seaborn --cov=tests --cov-config=setup.cfg tests\n============================= test session starts ==============================\nplatform linux -- Python 3.11.7, pytest-7.4.3, pluggy-1.3.0\nrootdir: /home/runner/work/seaborn/seaborn\nconfigfile: pyproject.toml\nplugins: cov-4.1.0, xdist-3.5.0\ncreated: 4/4 workers\n4 workers [2379 items]\n\n........................................................................ [ 3%]\n........................................................................ 
[ 6%]\n........................................................................ [ 9%]\n........................................................................ [ 12%]\n........................................................................ [ 15%]\n........................................................................ [ 18%]\n........................................................................ [ 21%]\n........................................................................ [ 24%]\n........................................................................ [ 27%]\n........................................................................ [ 30%]\n........................................................................ [ 33%]\n.s...................................................................... [ 36%]\n........................................................................ [ 39%]\n........................................................................ [ 42%]\n.................................................s.s................ss.. [ 45%]\n..ss.............ssss................................................sss [ 48%]\nsssssssssssssssss.sssssssssssssssssss.ssssssssssssss.................... [ 51%]\n...............................................s........................ [ 54%]\n........................................................................ [ 57%]\n........................................................................ [ 60%]\n..............................................................s......... [ 63%]\n..............................................................x......... [ 66%]\n......x.........x.........................................x..x.......... [ 69%]\n.............................................s.....s.................... [ 72%]\n..................s...................................................... [ 75%]\n........................................................................ [ 78%]\n........................................................................ [ 81%]\n........................................................................ [ 84%]\n..................F.F..x................................................ [ 87%]\n...........................F............................................ [ 90%]\n........................................................................ [ 93%]\n........................................................................ [ 96%]\n........................s..s............................................ [ 99%]\n.. 
[100%]\n=================================== FAILURES ===================================\n_______________________ TestPlotting.test_layout_extent ________________________\n[gw1] linux -- Python 3.11.7 /opt/hostedtoolcache/Python/3.11.7/x64/bin/python\n\nself = <tests._core.test_plot.TestPlotting object at 0x7f06656abb10>\n\n @pytest.mark.skipif(\n _version_predates(mpl, \"3.6\"),\n reason=\"mpl<3.6 does not have get_layout_engine\",\n )\n def test_layout_extent(self):\n \n> p = Plot().layout(extent=(.1, .2, .6, 1)).plot()\n\ntests/_core/test_plot.py:1100: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \nseaborn/_core/plot.py:937: in plot\n return self._plot(pyplot)\nseaborn/_core/plot.py:971: in _plot\n plotter._finalize_figure(self)\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\nself = <seaborn._core.plot.Plotter object at 0x7f0654edebd0>\np = <seaborn._core.plot.Plot object at 0x7f06567fe150>\n\n def _finalize_figure(self, p: Plot) -> None:\n \n for sub in self._subplots:\n ax = sub[\"ax\"]\n for axis in \"xy\":\n axis_key = sub[axis]\n axis_obj = getattr(ax, f\"{axis}axis\")\n \n # Axis limits\n if axis_key in p._limits or axis in p._limits:\n convert_units = getattr(ax, f\"{axis}axis\").convert_units\n a, b = p._limits.get(axis_key) or p._limits[axis]\n lo = a if a is None else convert_units(a)\n hi = b if b is None else convert_units(b)\n if isinstance(a, str):\n lo = cast(float, lo) - 0.5\n if isinstance(b, str):\n hi = cast(float, hi) + 0.5\n ax.set(**{f\"{axis}lim\": (lo, hi)})\n \n if axis_key in self._scales: # TODO when would it not be?\n self._scales[axis_key]._finalize(p, axis_obj)\n \n if (engine_name := p._layout_spec.get(\"engine\", default)) is not default:\n # None is a valid arg for Figure.set_layout_engine, hence `default`\n set_layout_engine(self._figure, engine_name)\n elif p._target is None:\n # Don't modify the layout engine if the user supplied their own\n # matplotlib figure and didn't specify an engine through Plot\n # TODO switch default to \"constrained\"?\n # TODO either way, make configurable\n set_layout_engine(self._figure, \"tight\")\n \n if (extent := p._layout_spec.get(\"extent\")) is not None:\n> engine = get_layout_engine(self._figure)\nE NameError: name 'get_layout_engine' is not defined\n\nseaborn/_core/plot.py:1815: NameError\n_________________ TestPlotting.test_constrained_layout_extent __________________\n[gw1] linux -- Python 3.11.7 /opt/hostedtoolcache/Python/3.11.7/x64/bin/python\n\nself = <tests._core.test_plot.TestPlotting object at 0x7f0665702410>\n\n @pytest.mark.skipif(\n _version_predates(mpl, \"3.6\"),\n reason=\"mpl<3.6 does not have get_layout_engine\",\n )\n def test_constrained_layout_extent(self):\n \n> p = Plot().layout(engine=\"constrained\", extent=(.1, .2, .6, 1)).plot()\n\ntests/_core/test_plot.py:1109: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \nseaborn/_core/plot.py:937: in plot\n return self._plot(pyplot)\nseaborn/_core/plot.py:971: in _plot\n plotter._finalize_figure(self)\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\nself = <seaborn._core.plot.Plotter object at 0x7f0656f58050>\np = <seaborn._core.plot.Plot object at 0x7f0656f597d0>\n\n def _finalize_figure(self, p: Plot) -> None:\n \n for sub in self._subplots:\n ax = sub[\"ax\"]\n for axis in \"xy\":\n axis_key = sub[axis]\n axis_obj = getattr(ax, f\"{axis}axis\")\n \n # Axis limits\n if axis_key in p._limits or axis in 
p._limits:\n convert_units = getattr(ax, f\"{axis}axis\").convert_units\n a, b = p._limits.get(axis_key) or p._limits[axis]\n lo = a if a is None else convert_units(a)\n hi = b if b is None else convert_units(b)\n if isinstance(a, str):\n lo = cast(float, lo) - 0.5\n if isinstance(b, str):\n hi = cast(float, hi) + 0.5\n ax.set(**{f\"{axis}lim\": (lo, hi)})\n \n if axis_key in self._scales: # TODO when would it not be?\n self._scales[axis_key]._finalize(p, axis_obj)\n \n if (engine_name := p._layout_spec.get(\"engine\", default)) is not default:\n # None is a valid arg for Figure.set_layout_engine, hence `default`\n set_layout_engine(self._figure, engine_name)\n elif p._target is None:\n # Don't modify the layout engine if the user supplied their own\n # matplotlib figure and didn't specify an engine through Plot\n # TODO switch default to \"constrained\"?\n # TODO either way, make configurable\n set_layout_engine(self._figure, \"tight\")\n \n if (extent := p._layout_spec.get(\"extent\")) is not None:\n> engine = get_layout_engine(self._figure)\nE NameError: name 'get_layout_engine' is not defined\n\nseaborn/_core/plot.py:1815: NameError\n_____________________ TestPlotting.test_base_layout_extent _____________________\n[gw1] linux -- Python 3.11.7 /opt/hostedtoolcache/Python/3.11.7/x64/bin/python\n\nself = <tests._core.test_plot.TestPlotting object at 0x7f0664b34450>\n\n def test_base_layout_extent(self):\n \n> p = Plot().layout(engine=None, extent=(.1, .2, .6, 1)).plot()\n\ntests/_core/test_plot.py:1114: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \nseaborn/_core/plot.py:937: in plot\n return self._plot(pyplot)\nseaborn/_core/plot.py:971: in _plot\n plotter._finalize_figure(self)\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\nself = <seaborn._core.plot.Plotter object at 0x7f0656d300d0>\np = <seaborn._core.plot.Plot object at 0x7f0656d301d0>\n\n def _finalize_figure(self, p: Plot) -> None:\n \n for sub in self._subplots:\n ax = sub[\"ax\"]\n for axis in \"xy\":\n axis_key = sub[axis]\n axis_obj = getattr(ax, f\"{axis}axis\")\n \n # Axis limits\n if axis_key in p._limits or axis in p._limits:\n convert_units = getattr(ax, f\"{axis}axis\").convert_units\n a, b = p._limits.get(axis_key) or p._limits[axis]\n lo = a if a is None else convert_units(a)\n hi = b if b is None else convert_units(b)\n if isinstance(a, str):\n lo = cast(float, lo) - 0.5\n if isinstance(b, str):\n hi = cast(float, hi) + 0.5\n ax.set(**{f\"{axis}lim\": (lo, hi)})\n \n if axis_key in self._scales: # TODO when would it not be?\n self._scales[axis_key]._finalize(p, axis_obj)\n \n if (engine_name := p._layout_spec.get(\"engine\", default)) is not default:\n # None is a valid arg for Figure.set_layout_engine, hence `default`\n set_layout_engine(self._figure, engine_name)\n elif p._target is None:\n # Don't modify the layout engine if the user supplied their own\n # matplotlib figure and didn't specify an engine through Plot\n # TODO switch default to \"constrained\"?\n # TODO either way, make configurable\n set_layout_engine(self._figure, \"tight\")\n \n if (extent := p._layout_spec.get(\"extent\")) is not None:\n> engine = get_layout_engine(self._figure)\nE NameError: name 'get_layout_engine' is not defined\n\nseaborn/_core/plot.py:1815: NameError\n\n---------- coverage: platform linux, python 3.11.7-final-0 -----------\nName Stmts Miss Cover\n------------------------------------------------------\nseaborn/__init__.py 16 0 100%\nseaborn/_base.py 766 
23 97%\nseaborn/_compat.py 77 47 39%\nseaborn/_core/__init__.py 0 0 100%\nseaborn/_core/data.py 116 5 96%\nseaborn/_core/exceptions.py 10 1 90%\nseaborn/_core/groupby.py 54 0 100%\nseaborn/_core/moves.py 109 0 100%\nseaborn/_core/plot.py 828 13 98%\nseaborn/_core/properties.py 425 4 99%\nseaborn/_core/rules.py 63 1 98%\nseaborn/_core/scales.py 502 49 90%\nseaborn/_core/subplots.py 140 0 100%\nseaborn/_core/typing.py 25 1 96%\nseaborn/_docstrings.py 40 5 88%\nseaborn/_marks/__init__.py 0 0 100%\nseaborn/_marks/area.py 86 3 97%\nseaborn/_marks/bar.py 123 0 100%\nseaborn/_marks/base.py 132 2 98%\nseaborn/_marks/dot.py 92 0 100%\nseaborn/_marks/line.py 116 0 100%\nseaborn/_marks/text.py 33 0 100%\nseaborn/_statistics.py 323 10 97%\nseaborn/_stats/__init__.py 0 0 100%\nseaborn/_stats/aggregation.py 42 1 98%\nseaborn/_stats/base.py 26 1 96%\nseaborn/_stats/counting.py 96 0 100%\nseaborn/_stats/density.py 99 3 97%\nseaborn/_stats/order.py 30 3 90%\nseaborn/_stats/regression.py 23 1 96%\nseaborn/_testing.py 52 4 92%\nseaborn/algorithms.py 60 0 100%\nseaborn/axisgrid.py 1039 32 97%\nseaborn/categorical.py 1204 13 99%\nseaborn/distributions.py 951 36 96%\nseaborn/matrix.py 551 317 42%\nseaborn/miscplot.py 24 0 100%\nseaborn/objects.py 15 0 100%\nseaborn/palettes.py 249 1 99%\nseaborn/rcmod.py 104 0 100%\nseaborn/regression.py 336 46 86%\nseaborn/relational.py 285 1 99%\nseaborn/utils.py 395 23 94%\ntests/__init__.py 0 0 100%\ntests/_core/__init__.py 0 0 100%\ntests/_core/test_data.py 293 2 99%\ntests/_core/test_groupby.py 83 0 100%\ntests/_core/test_moves.py 231 0 100%\ntests/_core/test_plot.py 1550 24 98%\ntests/_core/test_properties.py 365 4 99%\ntests/_core/test_rules.py 72 0 100%\ntests/_core/test_scales.py 547 1 99%\ntests/_core/test_subplots.py 368 0 100%\ntests/_marks/__init__.py 0 0 100%\ntests/_marks/test_area.py 84 0 100%\ntests/_marks/test_bar.py 152 0 100%\ntests/_marks/test_base.py 102 0 100%\ntests/_marks/test_dot.py 136 0 100%\ntests/_marks/test_line.py 298 0 100%\ntests/_marks/test_text.py 98 1 99%\ntests/_stats/__init__.py 0 0 100%\ntests/_stats/test_aggregation.py 84 0 100%\ntests/_stats/test_counting.py 180 0 100%\ntests/_stats/test_density.py 141 8 94%\ntests/_stats/test_order.py 64 1 98%\ntests/_stats/test_regression.py 36 0 100%\ntests/conftest.py 107 0 100%\ntests/test_algorithms.py 110 0 100%\ntests/test_axisgrid.py 1314 6 99%\ntests/test_base.py 1008 3 99%\ntests/test_categorical.py 2161 17 99%\ntests/test_distributions.py 1523 14 99%\ntests/test_docstrings.py 19 0 100%\ntests/test_matrix.py 864 454 47%\ntests/test_miscplot.py 24 0 100%\ntests/test_objects.py 11 0 100%\ntests/test_palettes.py 304 0 100%\ntests/test_rcmod.py 189 21 89%\ntests/test_regression.py 441 46 90%\ntests/test_relational.py 1213 8 99%\ntests/test_statistics.py 489 19 96%\ntests/test_utils.py 382 4 99%\n------------------------------------------------------\nTOTAL 24700 1279 95%\n\n=========================== short test summary info ============================\nFAILED tests/_core/test_plot.py::TestPlotting::test_layout_extent - NameError: name 'get_layout_engine' is not defined\nFAILED tests/_core/test_plot.py::TestPlotting::test_constrained_layout_extent - NameError: name 'get_layout_engine' is not defined\nFAILED tests/_core/test_plot.py::TestPlotting::test_base_layout_extent - NameError: name 'get_layout_engine' is not defined\n====== 3 failed, 2299 passed, 71 skipped, 6 xfailed in 244.97s (0:04:04) =======\nmake: *** [Makefile:4: test] Error 1\n##[error]Process completed with exit code 2.\n"}], 
"diff": "diff --git a/seaborn/_core/plot.py b/seaborn/_core/plot.py\nindex af4669c6..39ccd2e0 100644\n--- a/seaborn/_core/plot.py\n+++ b/seaborn/_core/plot.py\n@@ -40,7 +40,7 @@ from seaborn._core.typing import (\n )\n from seaborn._core.exceptions import PlotSpecError\n from seaborn._core.rules import categorical_order\n-from seaborn._compat import set_layout_engine\n+from seaborn._compat import get_layout_engine, set_layout_engine\n from seaborn.rcmod import axes_style, plotting_context\n from seaborn.palettes import color_palette\n \n", "difficulty": 2, "changed_files": ["seaborn/_core/plot.py"], "commit_link": "https://github.com/mwaskom/seaborn/tree/2201be21886bb82201f3c3487f5f1468f6e6ac81"}
data/python/2a59b55.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"language": "Python", "id": 18, "repo_owner": "django-import-export", "repo_name": "django-import-export", "head_branch": "remove_count_queries", "workflow_name": ".github/workflows/pre-commit.yml", "workflow_filename": "pre-commit.yml", "workflow_path": ".github/workflows/pre-commit.yml", "contributor": "PetrDlouhy", "sha_fail": "2a59b55e6124b33dca7f48c12845c78130b20fd5", "sha_success": "36786764b3ef1c4c3c8d0db451cea9e1b325616c", "workflow": "on:\n pull_request:\n push:\n branches:\n - main\n\njobs:\n main:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n - uses: actions/setup-python@v4\n with:\n python-version: 3.x\n - uses: pre-commit/[email protected]\n - uses: pre-commit-ci/[email protected]\n if: always()\n", "logs": [{"step_name": "main/4_Run [email protected]", "log": "##[group]Run pre-commit/[email protected]\nwith:\n extra_args: --all-files\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.1/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib\n##[endgroup]\n##[group]Run python -m pip install pre-commit\n\u001b[36;1mpython -m pip install pre-commit\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.1/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib\n##[endgroup]\nCollecting pre-commit\n Downloading pre_commit-3.6.0-py2.py3-none-any.whl.metadata (1.3 kB)\nCollecting cfgv>=2.0.0 (from pre-commit)\n Downloading cfgv-3.4.0-py2.py3-none-any.whl.metadata (8.5 kB)\nCollecting identify>=1.0.0 (from pre-commit)\n Downloading identify-2.5.33-py2.py3-none-any.whl.metadata (4.4 kB)\nCollecting nodeenv>=0.11.1 (from pre-commit)\n Downloading nodeenv-1.8.0-py2.py3-none-any.whl.metadata (21 kB)\nCollecting pyyaml>=5.1 (from pre-commit)\n Downloading PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.1 kB)\nCollecting virtualenv>=20.10.0 (from pre-commit)\n Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nCollecting setuptools (from nodeenv>=0.11.1->pre-commit)\n Downloading setuptools-69.0.2-py3-none-any.whl.metadata (6.3 kB)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.10.0->pre-commit)\n Downloading distlib-0.3.8-py2.py3-none-any.whl.metadata (5.1 kB)\nCollecting filelock<4,>=3.12.2 (from virtualenv>=20.10.0->pre-commit)\n Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting platformdirs<5,>=3.9.1 (from virtualenv>=20.10.0->pre-commit)\n Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nDownloading pre_commit-3.6.0-py2.py3-none-any.whl (204 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 204.0/204.0 kB 6.8 MB/s eta 0:00:00\nDownloading cfgv-3.4.0-py2.py3-none-any.whl (7.2 kB)\nDownloading identify-2.5.33-py2.py3-none-any.whl (98 kB)\n 
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 98.9/98.9 kB 26.4 MB/s eta 0:00:00\nDownloading nodeenv-1.8.0-py2.py3-none-any.whl (22 kB)\nDownloading PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (724 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 725.0/725.0 kB 31.5 MB/s eta 0:00:00\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 92.4 MB/s eta 0:00:00\nDownloading distlib-0.3.8-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 78.2 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nDownloading setuptools-69.0.2-py3-none-any.whl (819 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 819.5/819.5 kB 93.4 MB/s eta 0:00:00\nInstalling collected packages: distlib, setuptools, pyyaml, platformdirs, identify, filelock, cfgv, virtualenv, nodeenv, pre-commit\nSuccessfully installed cfgv-3.4.0 distlib-0.3.8 filelock-3.13.1 identify-2.5.33 nodeenv-1.8.0 platformdirs-4.1.0 pre-commit-3.6.0 pyyaml-6.0.1 setuptools-69.0.2 virtualenv-20.25.0\n##[group]Run python -m pip freeze --local\n\u001b[36;1mpython -m pip freeze --local\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.1/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib\n##[endgroup]\ncfgv==3.4.0\ndistlib==0.3.8\nfilelock==3.13.1\nidentify==2.5.33\nnodeenv==1.8.0\nplatformdirs==4.1.0\npre-commit==3.6.0\nPyYAML==6.0.1\nsetuptools==69.0.2\nvirtualenv==20.25.0\n##[group]Run actions/cache@v3\nwith:\n path: ~/.cache/pre-commit\n key: pre-commit-3|/opt/hostedtoolcache/Python/3.12.1/x64|78083b3309c934e46e7d0a952b7a4c6dc6df8860edaa74474d06dd8c36e1b0a2\n enableCrossOsArchive: false\n fail-on-cache-miss: false\n lookup-only: false\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.1/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib\n##[endgroup]\nCache Size: ~15 MB (15847689 
B)\n[command]/usr/bin/tar -xf /home/runner/work/_temp/83ce4811-68cf-4484-bf4e-4cfbe36bb31a/cache.tzst -P -C /home/runner/work/django-import-export/django-import-export --use-compress-program unzstd\nCache restored successfully\nCache restored from key: pre-commit-3|/opt/hostedtoolcache/Python/3.12.1/x64|78083b3309c934e46e7d0a952b7a4c6dc6df8860edaa74474d06dd8c36e1b0a2\n##[group]Run pre-commit run --show-diff-on-failure --color=always --all-files\n\u001b[36;1mpre-commit run --show-diff-on-failure --color=always --all-files\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.1/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib\n##[endgroup]\nblack....................................................................\u001b[42mPassed\u001b[m\nisort....................................................................\u001b[42mPassed\u001b[m\nflake8...................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: flake8\u001b[m\n\u001b[2m- exit code: 1\u001b[m\n\n\u001b[1mimport_export/admin.py\u001b[m\u001b[36m:\u001b[m749\u001b[36m:\u001b[m89\u001b[36m:\u001b[m \u001b[1m\u001b[31mE501\u001b[m line too long (96 > 88 characters)\n\u001b[1mimport_export/admin.py\u001b[m\u001b[36m:\u001b[m756\u001b[36m:\u001b[m89\u001b[36m:\u001b[m \u001b[1m\u001b[31mE501\u001b[m line too long (98 > 88 characters)\n\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/import_export/admin.py b/import_export/admin.py\nindex a1b288e9..8bb2db67 100644\n--- a/import_export/admin.py\n+++ b/import_export/admin.py\n@@ -746,14 +746,16 @@ class ExportMixin(BaseExportMixin, ImportExportMixinBase):\n def get_results(self, request):\n \"\"\"\n We override this method because we only call ChangeList.get_queryset()\n- so we don't need anything from this method. The get_results() gets called during\n- ChangeList.__init__() and we do want to avoid unnecessary COUNT queries.\n+ so we don't need anything from this method.\n+ The get_results() gets called during ChangeList.__init__()\n+ and we do want to avoid unnecessary COUNT queries.\n \"\"\"\n pass\n \n cl = ExportChangeList(**changelist_kwargs)\n \n- # get_queryset() is already called during initialization, it is enough to get it's results\n+ # get_queryset() is already called during initialization,\n+ # it is enough to get it's results\n if hasattr(cl, \"queryset\"):\n return cl.queryset\n \n", "difficulty": 0, "changed_files": ["import_export/admin.py"], "commit_link": "https://github.com/django-import-export/django-import-export/tree/2a59b55e6124b33dca7f48c12845c78130b20fd5"}
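
The fix recorded in this datapoint is purely mechanical: flake8's E501 (here with an 88-character limit, matching black) is satisfied by re-wrapping an overlong docstring and comment, with no behavior change. A small illustrative sketch of the same before/after, using hypothetical function names rather than the import_export source:

    # Before (fails E501): this docstring line runs well past 88 characters.
    def get_results_long():
        """We override this because we only need get_queryset(), and get_results() runs during init."""

    # After (passes): the same sentence wrapped within the limit.
    def get_results_wrapped():
        """
        We override this because we only need get_queryset(),
        and get_results() runs during init.
        """

E501 counts every character on the physical line, including indentation and inline comments, which is why the diff above also splits the comment about get_queryset() across two lines.
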
data/python/2ab9e84.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"language": "Python", "id": 6, "repo_owner": "qtile", "repo_name": "qtile", "head_branch": "lists-match", "workflow_name": "Run pre-commit", "workflow_filename": "pre_commit.yml", "workflow_path": ".github/workflows/pre_commit.yml", "contributor": "jwijenbergh", "sha_fail": "2ab9e843db39063cc9bc33b924cef535eb289894", "sha_success": "26c042804d308f4c1c5b5061ee10da1515129489", "workflow": "name: Run pre-commit\n\non:\n push:\n pull_request:\n\njobs:\n check:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - uses: actions/setup-python@v4\n - name: Install dependencies\n run: |\n sudo apt update\n sudo apt install --no-install-recommends libxkbcommon-dev\n - uses: pre-commit/[email protected]\n", "logs": [{"step_name": "check/5_Run [email protected]", "log": "##[group]Run pre-commit/[email protected]\nwith:\n extra_args: --all-files\n##[endgroup]\n##[group]Run python -m pip install pre-commit\n\u001b[36;1mpython -m pip install pre-commit\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\n##[endgroup]\nDefaulting to user installation because normal site-packages is not writeable\nCollecting pre-commit\n Downloading pre_commit-3.6.0-py2.py3-none-any.whl (204 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 204.0/204.0 KB 6.3 MB/s eta 0:00:00\nCollecting nodeenv>=0.11.1\n Downloading nodeenv-1.8.0-py2.py3-none-any.whl (22 kB)\nRequirement already satisfied: pyyaml>=5.1 in /usr/lib/python3/dist-packages (from pre-commit) (5.4.1)\nCollecting cfgv>=2.0.0\n Downloading cfgv-3.4.0-py2.py3-none-any.whl (7.2 kB)\nCollecting virtualenv>=20.10.0\n Downloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 24.4 MB/s eta 0:00:00\nCollecting identify>=1.0.0\n Downloading identify-2.5.33-py2.py3-none-any.whl (98 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 98.9/98.9 KB 37.4 MB/s eta 0:00:00\nRequirement already satisfied: setuptools in /usr/lib/python3/dist-packages (from nodeenv>=0.11.1->pre-commit) (59.6.0)\nCollecting filelock<4,>=3.12.2\n Downloading filelock-3.13.1-py3-none-any.whl (11 kB)\nRequirement already satisfied: platformdirs<5,>=3.9.1 in /usr/local/lib/python3.10/dist-packages (from virtualenv>=20.10.0->pre-commit) (4.1.0)\nCollecting distlib<1,>=0.3.7\n Downloading distlib-0.3.8-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 KB 43.2 MB/s eta 0:00:00\nInstalling collected packages: distlib, nodeenv, identify, filelock, cfgv, virtualenv, pre-commit\nSuccessfully installed cfgv-3.4.0 distlib-0.3.8 filelock-3.13.1 identify-2.5.33 nodeenv-1.8.0 pre-commit-3.6.0 virtualenv-20.25.0\n##[group]Run python -m pip freeze --local\n\u001b[36;1mpython -m pip freeze --local\u001b[0m\nshell: /usr/bin/bash --noprofile 
--norc -e -o pipefail {0}\n##[endgroup]\nargcomplete==3.2.1\ncfgv==3.4.0\ndistlib==0.3.8\nfilelock==3.13.1\nidentify==2.5.33\nnodeenv==1.8.0\npackaging==23.2\npipx==1.3.3\nplatformdirs==4.1.0\npre-commit==3.6.0\ntomli==2.0.1\nuserpath==1.9.1\nvirtualenv==20.25.0\n##[group]Run actions/cache@v3\nwith:\n path: ~/.cache/pre-commit\n key: pre-commit-3||05c1fbbb63b353467651b511e6ac241ffd2d8e71749cfabfe5ee6bc8366d2d02\n enableCrossOsArchive: false\n fail-on-cache-miss: false\n lookup-only: false\n##[endgroup]\nCache Size: ~36 MB (37431926 B)\n[command]/usr/bin/tar -xf /home/runner/work/_temp/8e85cf73-4d99-4c40-a6e8-25c81b58e73c/cache.tzst -P -C /home/runner/work/qtile/qtile --use-compress-program unzstd\nCache restored successfully\nCache restored from key: pre-commit-3||05c1fbbb63b353467651b511e6ac241ffd2d8e71749cfabfe5ee6bc8366d2d02\n##[group]Run pre-commit run --show-diff-on-failure --color=always --all-files\n\u001b[36;1mpre-commit run --show-diff-on-failure --color=always --all-files\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\n##[endgroup]\nblack....................................................................\u001b[42mPassed\u001b[m\nisort....................................................................\u001b[42mPassed\u001b[m\nflake8...................................................................\u001b[42mPassed\u001b[m\nmypy.....................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: mypy\u001b[m\n\u001b[2m- exit code: 1\u001b[m\n\nlibqtile/config.py:886: error: Statement is unreachable [unreachable]\nlibqtile/config.py:890: error: Statement is unreachable [unreachable]\nlibqtile/config.py:894: error: Statement is unreachable [unreachable]\nlibqtile/config.py:911: error: Statement is unreachable [unreachable]\nlibqtile/config.py:915: error: Statement is unreachable [unreachable]\nFound 5 errors in 1 file (checked 179 source files)\n\nvulture..................................................................\u001b[42mPassed\u001b[m\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/libqtile/config.py b/libqtile/config.py\nindex 9801b3ba..6a6163e2 100644\n--- a/libqtile/config.py\n+++ b/libqtile/config.py\n@@ -883,15 +883,15 @@ class Match:\n \n if title is not None:\n if isinstance(title, list): # type: ignore\n- title = convert_deprecated_list(title, \"title\")\n+ title = convert_deprecated_list(title, \"title\") # type: ignore\n self._rules[\"title\"] = title\n if wm_class is not None:\n if isinstance(wm_class, list): # type: ignore\n- wm_class = convert_deprecated_list(wm_class, \"wm_class\")\n+ wm_class = convert_deprecated_list(wm_class, \"wm_class\") # type: ignore\n self._rules[\"wm_class\"] = wm_class\n if wm_instance_class is not None:\n if isinstance(wm_instance_class, list): # type: ignore\n- wm_instance_class = convert_deprecated_list(\n+ wm_instance_class = convert_deprecated_list( # type: ignore\n wm_instance_class, \"wm_instance_class\"\n )\n self._rules[\"wm_instance_class\"] = wm_instance_class\n@@ -908,11 +908,11 @@ class Match:\n \n if role is not None:\n if isinstance(role, list): # type: ignore\n- role = convert_deprecated_list(role, \"role\")\n+ role = convert_deprecated_list(role, \"role\") # type: ignore\n self._rules[\"role\"] = role\n if wm_type is not None:\n if isinstance(wm_type, list): # type: ignore\n- wm_type = convert_deprecated_list(wm_type, \"wm_type\")\n+ wm_type = convert_deprecated_list(wm_type, \"wm_type\") # type: ignore\n 
self._rules[\"wm_type\"] = wm_type\n \n @staticmethod\n", "difficulty": 1, "changed_files": ["libqtile/config.py"], "commit_link": "https://github.com/qtile/qtile/tree/2ab9e843db39063cc9bc33b924cef535eb289894"}
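Note on the qtile record above: mypy's five "Statement is unreachable" errors arise because the Match parameters are annotated as non-list types, so after the `isinstance(x, list)` narrowing mypy treats the deprecated-list branch as dead code; the recorded fix extends the existing per-line `# type: ignore` comments to the conversion calls instead of widening the annotations. A minimal, self-contained sketch of that pattern (the helper below is a hypothetical stand-in for qtile's convert_deprecated_list, not its real implementation):

from typing import Optional

def convert_deprecated_list(values, name):
    # Hypothetical stand-in: collapse a legacy list argument into the
    # single string the newer API expects.
    return "|".join(str(v) for v in values)

def build_rules(title: Optional[str] = None) -> dict:
    rules = {}
    if title is not None:
        # Under --warn-unreachable, mypy narrows `title` to `str` and flags
        # the branch body as dead code; per-line ignores keep the runtime
        # deprecation path working without widening the annotation.
        if isinstance(title, list):  # type: ignore
            title = convert_deprecated_list(title, "title")  # type: ignore
        rules["title"] = title
    return rules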
data/python/2c06ffa.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 5, "repo_owner": "huggingface", "repo_name": "diffusers", "head_branch": "ipadapterfaceid", "workflow_name": "Run code quality checks", "workflow_filename": "pr_quality.yml", "workflow_path": ".github/workflows/pr_quality.yml", "contributor": "fabiorigano", "sha_fail": "2c06ffa4c9d2c37846c60ad75899b4d72f214ff9", "sha_success": "217d9d073981605acab5200fc841f20c798c1449", "workflow": "name: Run code quality checks\n\non:\n pull_request:\n branches:\n - main\n push:\n branches:\n - main\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}\n cancel-in-progress: true\n\njobs:\n check_code_quality:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n - name: Set up Python\n uses: actions/setup-python@v4\n with:\n python-version: \"3.8\"\n - name: Install dependencies\n run: |\n python -m pip install --upgrade pip\n pip install .[quality]\n - name: Check quality\n run: |\n ruff check examples tests src utils scripts\n ruff format examples tests src utils scripts --check\n\n check_repository_consistency:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n - name: Set up Python\n uses: actions/setup-python@v4\n with:\n python-version: \"3.8\"\n - name: Install dependencies\n run: |\n python -m pip install --upgrade pip\n pip install .[quality]\n - name: Check quality\n run: |\n python utils/check_copies.py\n python utils/check_dummies.py\n make deps_table_check_updated\n", "logs": [{"step_name": "check_code_quality/5_Check quality.txt", "log": "##[group]Run ruff check examples tests src utils scripts\n\u001b[36;1mruff check examples tests src utils scripts\u001b[0m\n\u001b[36;1mruff format examples tests src utils scripts --check\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/actions-runner/_work/_tool/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/actions-runner/_work/_tool/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/actions-runner/_work/_tool/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/actions-runner/_work/_tool/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/actions-runner/_work/_tool/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/actions-runner/_work/_tool/Python/3.8.18/x64/lib\n##[endgroup]\nexamples/community/ip_adapter_face_id.py:15:1: I001 [*] Import block is un-sorted or un-formatted\nFound 1 error.\n[*] 1 fixable with the `--fix` option.\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/examples/community/README.md b/examples/community/README.md\nindex f205f3b70..2fdbdb414 100755\n--- a/examples/community/README.md\n+++ b/examples/community/README.md\n@@ -3307,7 +3307,7 @@ pipeline = DiffusionPipeline.from_pretrained(\n torch_dtype=torch.float16,\n scheduler=noise_scheduler,\n vae=vae,\n- custom_pipeline=\"./forked/diffusers/examples/community/ip_adapter_face_id.py\"\n+ custom_pipeline=\"ip_adapter_face_id\"\n )\n pipeline.load_ip_adapter_face_id(\"h94/IP-Adapter-FaceID\", \"ip-adapter-faceid_sd15.bin\")\n pipeline.to(\"cuda\")\ndiff --git a/examples/community/ip_adapter_face_id.py b/examples/community/ip_adapter_face_id.py\nindex e3c5a2c84..d9325742c 100644\n--- a/examples/community/ip_adapter_face_id.py\n+++ b/examples/community/ip_adapter_face_id.py\n@@ -14,12 +14,12 @@\n \n import inspect\n from typing import Any, Callable, Dict, List, Optional, Union\n-from safetensors import safe_open\n \n import torch\n import torch.nn as nn\n import torch.nn.functional as F\n from packaging import version\n+from safetensors import safe_open\n from transformers import 
CLIPImageProcessor, CLIPTextModel, CLIPTokenizer, CLIPVisionModelWithProjection\n \n from diffusers.configuration_utils import FrozenDict\n@@ -27,20 +27,20 @@ from diffusers.image_processor import VaeImageProcessor\n from diffusers.loaders import FromSingleFileMixin, IPAdapterMixin, LoraLoaderMixin, TextualInversionLoaderMixin\n from diffusers.models import AutoencoderKL, UNet2DConditionModel\n from diffusers.models.attention_processor import FusedAttnProcessor2_0\n-from diffusers.models.lora import adjust_lora_scale_text_encoder, LoRALinearLayer\n+from diffusers.models.lora import LoRALinearLayer, adjust_lora_scale_text_encoder\n+from diffusers.pipelines.pipeline_utils import DiffusionPipeline\n+from diffusers.pipelines.stable_diffusion.pipeline_output import StableDiffusionPipelineOutput\n+from diffusers.pipelines.stable_diffusion.safety_checker import StableDiffusionSafetyChecker\n from diffusers.schedulers import KarrasDiffusionSchedulers\n from diffusers.utils import (\n- _get_model_file,\n USE_PEFT_BACKEND,\n+ _get_model_file,\n deprecate,\n logging,\n scale_lora_layers,\n unscale_lora_layers,\n )\n from diffusers.utils.torch_utils import randn_tensor\n-from diffusers.pipelines.pipeline_utils import DiffusionPipeline\n-from diffusers.pipelines.stable_diffusion.pipeline_output import StableDiffusionPipelineOutput\n-from diffusers.pipelines.stable_diffusion.safety_checker import StableDiffusionSafetyChecker\n \n \n logger = logging.get_logger(__name__) # pylint: disable=invalid-name\n@@ -555,7 +555,7 @@ class IPAdapterFaceIDStableDiffusionPipeline(\n revision=revision,\n subfolder=subfolder,\n user_agent=user_agent,\n- )\n+ )\n if weight_name.endswith(\".safetensors\"):\n state_dict = {\"image_proj\": {}, \"ip_adapter\": {}}\n with safe_open(model_file, framework=\"pt\", device=\"cpu\") as f:\n@@ -1438,7 +1438,7 @@ class IPAdapterFaceIDStableDiffusionPipeline(\n extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n \n # 6.1 Add image embeds for IP-Adapter\n- added_cond_kwargs ={\"image_embeds\": image_embeds} if image_embeds is not None else None\n+ added_cond_kwargs = {\"image_embeds\": image_embeds} if image_embeds is not None else None\n \n # 6.2 Optionally get Guidance Scale Embedding\n timestep_cond = None\n", "difficulty": 0, "changed_files": ["examples/community/README.md", "examples/community/ip_adapter_face_id.py"], "commit_link": "https://github.com/huggingface/diffusers/tree/2c06ffa4c9d2c37846c60ad75899b4d72f214ff9"}
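Note on the diffusers record above: ruff's I001 is purely an import-ordering violation, and the log itself marks it as fixable with the `--fix` option. As the recorded diff shows, the fix moves `from safetensors import safe_open` down into the alphabetized third-party block and reorders names within import lists. A minimal sketch of the layout that passes, using names taken from the diff (assumes the packages are installed; shown for ordering only):

# Standard library block first.
import inspect
from typing import Any, Callable, Dict, List, Optional, Union

# Third-party block, alphabetized; safe_open now sits between packaging
# and transformers instead of above the whole block.
import torch
from packaging import version
from safetensors import safe_open
from transformers import CLIPImageProcessor

# Within an import list the diff sorts case-sensitively, so
# LoRALinearLayer precedes adjust_lora_scale_text_encoder.
from diffusers.models.lora import LoRALinearLayer, adjust_lora_scale_text_encoder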
data/python/2f0605c.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 42, "repo_owner": "django-import-export", "repo_name": "django-import-export", "head_branch": "fix-declared-model-fields-altering-export-order", "workflow_name": ".github/workflows/pre-commit.yml", "workflow_filename": "pre-commit.yml", "workflow_path": ".github/workflows/pre-commit.yml", "contributor": "cocorocho", "sha_fail": "2f0605c9ec79b7a675728cb525ad55b36ade2e93", "sha_success": "d1e35b26f1cfc1bd08b03ac606fa8bcd26dc91cb", "workflow": "on:\n pull_request:\n push:\n branches:\n - main\n\njobs:\n main:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n - uses: actions/setup-python@v4\n with:\n python-version: 3.x\n - uses: pre-commit/[email protected]\n - uses: pre-commit-ci/[email protected]\n if: always()\n", "logs": [{"step_name": "main/4_Run [email protected]", "log": "##[group]Run pre-commit/[email protected]\nwith:\n extra_args: --all-files\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.0/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib\n##[endgroup]\n##[group]Run python -m pip install pre-commit\n\u001b[36;1mpython -m pip install pre-commit\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.0/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib\n##[endgroup]\nCollecting pre-commit\n Downloading pre_commit-3.5.0-py2.py3-none-any.whl.metadata (1.3 kB)\nCollecting cfgv>=2.0.0 (from pre-commit)\n Downloading cfgv-3.4.0-py2.py3-none-any.whl.metadata (8.5 kB)\nCollecting identify>=1.0.0 (from pre-commit)\n Downloading identify-2.5.32-py2.py3-none-any.whl.metadata (4.4 kB)\nCollecting nodeenv>=0.11.1 (from pre-commit)\n Downloading nodeenv-1.8.0-py2.py3-none-any.whl.metadata (21 kB)\nCollecting pyyaml>=5.1 (from pre-commit)\n Downloading PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.1 kB)\nCollecting virtualenv>=20.10.0 (from pre-commit)\n Downloading virtualenv-20.24.7-py3-none-any.whl.metadata (4.5 kB)\nCollecting setuptools (from nodeenv>=0.11.1->pre-commit)\n Downloading setuptools-69.0.2-py3-none-any.whl.metadata (6.3 kB)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.10.0->pre-commit)\n Downloading distlib-0.3.7-py2.py3-none-any.whl.metadata (5.1 kB)\nCollecting filelock<4,>=3.12.2 (from virtualenv>=20.10.0->pre-commit)\n Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting platformdirs<5,>=3.9.1 (from virtualenv>=20.10.0->pre-commit)\n Downloading platformdirs-4.0.0-py3-none-any.whl.metadata (11 kB)\nDownloading pre_commit-3.5.0-py2.py3-none-any.whl (203 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 203.7/203.7 kB 15.8 MB/s eta 0:00:00\nDownloading cfgv-3.4.0-py2.py3-none-any.whl (7.2 kB)\nDownloading identify-2.5.32-py2.py3-none-any.whl (98 kB)\n 
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 98.9/98.9 kB 32.6 MB/s eta 0:00:00\nDownloading nodeenv-1.8.0-py2.py3-none-any.whl (22 kB)\nDownloading PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (724 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 725.0/725.0 kB 49.7 MB/s eta 0:00:00\nDownloading virtualenv-20.24.7-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 124.4 MB/s eta 0:00:00\nDownloading distlib-0.3.7-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 84.7 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading platformdirs-4.0.0-py3-none-any.whl (17 kB)\nDownloading setuptools-69.0.2-py3-none-any.whl (819 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 819.5/819.5 kB 99.2 MB/s eta 0:00:00\nInstalling collected packages: distlib, setuptools, pyyaml, platformdirs, identify, filelock, cfgv, virtualenv, nodeenv, pre-commit\nSuccessfully installed cfgv-3.4.0 distlib-0.3.7 filelock-3.13.1 identify-2.5.32 nodeenv-1.8.0 platformdirs-4.0.0 pre-commit-3.5.0 pyyaml-6.0.1 setuptools-69.0.2 virtualenv-20.24.7\n\n[notice] A new release of pip is available: 23.2.1 -> 23.3.1\n[notice] To update, run: pip install --upgrade pip\n##[group]Run python -m pip freeze --local\n\u001b[36;1mpython -m pip freeze --local\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.0/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib\n##[endgroup]\ncfgv==3.4.0\ndistlib==0.3.7\nfilelock==3.13.1\nidentify==2.5.32\nnodeenv==1.8.0\nplatformdirs==4.0.0\npre-commit==3.5.0\nPyYAML==6.0.1\nsetuptools==69.0.2\nvirtualenv==20.24.7\n##[group]Run actions/cache@v3\nwith:\n path: ~/.cache/pre-commit\n key: pre-commit-3|/opt/hostedtoolcache/Python/3.12.0/x64|78083b3309c934e46e7d0a952b7a4c6dc6df8860edaa74474d06dd8c36e1b0a2\n enableCrossOsArchive: false\n fail-on-cache-miss: false\n lookup-only: false\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.0/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python3_ROOT_DIR: 
/opt/hostedtoolcache/Python/3.12.0/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib\n##[endgroup]\nCache Size: ~15 MB (15834028 B)\n[command]/usr/bin/tar -xf /home/runner/work/_temp/98029e33-8cfe-40ab-80a4-dbe66e8ec82f/cache.tzst -P -C /home/runner/work/django-import-export/django-import-export --use-compress-program unzstd\nCache restored successfully\nCache restored from key: pre-commit-3|/opt/hostedtoolcache/Python/3.12.0/x64|78083b3309c934e46e7d0a952b7a4c6dc6df8860edaa74474d06dd8c36e1b0a2\n##[group]Run pre-commit run --show-diff-on-failure --color=always --all-files\n\u001b[36;1mpre-commit run --show-diff-on-failure --color=always --all-files\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.0/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib\n##[endgroup]\nblack....................................................................\u001b[42mPassed\u001b[m\nisort....................................................................\u001b[42mPassed\u001b[m\nflake8...................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: flake8\u001b[m\n\u001b[2m- exit code: 1\u001b[m\n\n\u001b[1mimport_export/resources.py\u001b[m\u001b[36m:\u001b[m1360\u001b[36m:\u001b[m89\u001b[36m:\u001b[m \u001b[1m\u001b[31mE501\u001b[m line too long (101 > 88 characters)\n\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/import_export/resources.py b/import_export/resources.py\nindex 7d7ad475..99c3fe91 100644\n--- a/import_export/resources.py\n+++ b/import_export/resources.py\n@@ -1357,7 +1357,8 @@ class ModelDeclarativeMetaclass(DeclarativeMetaclass):\n continue\n \n if f.name in declared_fields:\n- # If model field is declared in `ModelResource`, remove it from `declared_fields`\n+ # If model field is declared in `ModelResource`,\n+ # remove it from `declared_fields`\n # to keep exact order of model fields\n field = declared_fields.pop(f.name)\n else:\n", "difficulty": 0, "changed_files": ["import_export/resources.py"], "commit_link": "https://github.com/django-import-export/django-import-export/tree/2f0605c9ec79b7a675728cb525ad55b36ade2e93"}
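Note on the django-import-export record above: the only failure is flake8's E501 (101 characters against the project's 88-character limit) on a comment in resources.py, and the recorded fix simply wraps the comment. A runnable reconstruction of the fixed hunk (the surrounding names are hypothetical minimal stand-ins for the Django objects in the real file):

declared_fields = {"title": object()}

class _Field:
    # Hypothetical stand-in for a Django model field with a name attribute.
    name = "title"

f = _Field()
if f.name in declared_fields:
    # If model field is declared in `ModelResource`,
    # remove it from `declared_fields`
    # to keep exact order of model fields  (comment wrapped to satisfy E501)
    field = declared_fields.pop(f.name)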
data/python/3dd8e44.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 46, "repo_owner": "huggingface", "repo_name": "accelerate", "head_branch": "device-agnostic-testing", "workflow_name": "Quality Check", "workflow_filename": "quality.yml", "workflow_path": ".github/workflows/quality.yml", "contributor": "statelesshz", "sha_fail": "3dd8e4404a0ce2e29db4911dc2cd7e94755be631", "sha_success": "c9a13fe9a890cfb48ee6c7b1c6abd64c8fb432da", "workflow": "name: Quality Check\n\non: [pull_request]\n\njobs:\n quality:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/[email protected]\n - name: Set up Python 3.8\n uses: actions/setup-python@v3\n with:\n python-version: 3.8\n - name: Install Python dependencies\n run: pip install -e .[quality]\n - name: Run Quality check\n run: make quality\n - name: Check if failure\n if: ${{ failure() }}\n run: |\n echo \"Quality check failed. Please ensure the right dependency versions are installed with 'pip install -e .[quality]' and rerun 'make style; make quality;'\" >> $GITHUB_STEP_SUMMARY\n\n", "logs": [{"step_name": "quality/5_Run Quality check.txt", "log": "##[group]Run make quality\n\u001b[36;1mmake quality\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\nblack --required-version 23 --check tests src examples benchmarks utils\nAll done! \u2728 \ud83c\udf70 \u2728\n118 files would be left unchanged.\nruff tests src examples benchmarks utils\ntests/deepspeed/test_deepspeed.py:15:1: I001 [*] Import block is un-sorted or un-formatted\ntests/fsdp/test_fsdp.py:16:1: I001 [*] Import block is un-sorted or un-formatted\nFound 2 errors.\n[*] 2 fixable with the `--fix` option.\nmake: *** [Makefile:16: quality] Error 1\n##[error]Process completed with exit code 2.\n"}], "diff": "diff --git a/tests/deepspeed/test_deepspeed.py b/tests/deepspeed/test_deepspeed.py\nindex 035965c..612f234 100644\n--- a/tests/deepspeed/test_deepspeed.py\n+++ b/tests/deepspeed/test_deepspeed.py\n@@ -36,9 +36,9 @@ from accelerate.test_utils.testing import (\n AccelerateTestCase,\n TempDirTestCase,\n execute_subprocess_async,\n- require_non_cpu,\n require_deepspeed,\n require_multi_device,\n+ require_non_cpu,\n slow,\n )\n from accelerate.test_utils.training import RegressionDataset\ndiff --git a/tests/fsdp/test_fsdp.py b/tests/fsdp/test_fsdp.py\nindex d939991..c494f5e 100644\n--- a/tests/fsdp/test_fsdp.py\n+++ b/tests/fsdp/test_fsdp.py\n@@ -28,9 +28,9 @@ from accelerate.test_utils.testing import (\n AccelerateTestCase,\n TempDirTestCase,\n execute_subprocess_async,\n- require_non_cpu,\n require_fsdp,\n require_multi_device,\n+ require_non_cpu,\n slow,\n )\n from accelerate.utils.constants import (\n", "difficulty": 0, "changed_files": ["tests/deepspeed/test_deepspeed.py", "tests/fsdp/test_fsdp.py"], "commit_link": "https://github.com/huggingface/accelerate/tree/3dd8e4404a0ce2e29db4911dc2cd7e94755be631"}
data/python/3ed7a88.json ADDED
The diff for this file is too large to render. See raw diff
data/python/43dd59c.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 21, "repo_owner": "yt-dlp", "repo_name": "yt-dlp", "head_branch": "master", "workflow_name": "Quick Test", "workflow_filename": "quick-test.yml", "workflow_path": ".github/workflows/quick-test.yml", "contributor": "Kenshin9977", "sha_fail": "43dd59c3137df77f5dd22cef4cb7bedfe9f6b12e", "sha_success": "0bedc0ee590f45a6229a4b78f83297f8c07392bb", "workflow": "name: Quick Test\non: [push, pull_request]\npermissions:\n contents: read\n\njobs:\n tests:\n name: Core Test\n if: \"!contains(github.event.head_commit.message, 'ci skip all')\"\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n - name: Set up Python 3.11\n uses: actions/setup-python@v4\n with:\n python-version: '3.11'\n - name: Install test requirements\n run: pip install pytest pycryptodomex\n - name: Run tests\n run: |\n python3 -m yt_dlp -v || true\n ./devscripts/run_tests.sh core\n flake8:\n name: Linter\n if: \"!contains(github.event.head_commit.message, 'ci skip all')\"\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n - uses: actions/setup-python@v4\n - name: Install flake8\n run: pip install flake8\n - name: Make lazy extractors\n run: python devscripts/make_lazy_extractors.py\n - name: Run flake8\n run: flake8 .\n", "logs": [{"step_name": "Core Test/5_Run tests.txt", "log": "##[group]Run python3 -m yt_dlp -v || true\n\u001b[36;1mpython3 -m yt_dlp -v || true\u001b[0m\n\u001b[36;1m./devscripts/run_tests.sh core\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.11.6/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.11.6/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.6/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.6/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.6/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.11.6/x64/lib\n##[endgroup]\n[debug] Command-line config: ['-v']\n[debug] Encodings: locale UTF-8, fs utf-8, pref UTF-8, out utf-8 (No ANSI), error utf-8 (No ANSI), screen utf-8 (No ANSI)\n[debug] yt-dlp version [email protected] [b634ba742] (source)\n[debug] Lazy loading extractors is disabled\n[debug] Git HEAD: f8abf45\n[debug] Python 3.11.6 (CPython x86_64 64bit) - Linux-6.2.0-1015-azure-x86_64-with-glibc2.35 (OpenSSL 3.0.2 15 Mar 2022, glibc 2.35)\n[debug] exe versions: none\n[debug] Optional libraries: Cryptodome-3.19.0, sqlite3-3.37.2\n[debug] Proxy map: {}\n[debug] Request Handlers: urllib\n[debug] Loaded 1890 extractors\n\nUsage: yt-dlp [OPTIONS] URL [URL...]\n\nyt-dlp: error: You must provide at least one URL.\nType yt-dlp --help to see a list of all options.\n============================= test session starts ==============================\nplatform linux -- Python 3.11.6, pytest-7.4.3, pluggy-1.3.0 -- /opt/hostedtoolcache/Python/3.11.6/x64/bin/python3\ncachedir: .pytest_cache\nrootdir: /home/runner/work/yt-dlp/yt-dlp\nconfigfile: setup.cfg\ncollecting ... 
collected 6320 items / 5705 deselected / 615 selected\n\ntest/test_InfoExtractor.py::TestInfoExtractor::test_download_json PASSED [ 0%]\ntest/test_InfoExtractor.py::TestInfoExtractor::test_extract_jwplayer_data_realworld PASSED [ 0%]\ntest/test_InfoExtractor.py::TestInfoExtractor::test_html_search_meta PASSED [ 0%]\ntest/test_InfoExtractor.py::TestInfoExtractor::test_html_search_regex PASSED [ 0%]\ntest/test_InfoExtractor.py::TestInfoExtractor::test_ie_key PASSED [ 0%]\ntest/test_InfoExtractor.py::TestInfoExtractor::test_opengraph PASSED [ 0%]\ntest/test_InfoExtractor.py::TestInfoExtractor::test_parse_f4m_formats PASSED [ 1%]\ntest/test_InfoExtractor.py::TestInfoExtractor::test_parse_html5_media_entries PASSED [ 1%]\ntest/test_InfoExtractor.py::TestInfoExtractor::test_parse_ism_formats PASSED [ 1%]\ntest/test_InfoExtractor.py::TestInfoExtractor::test_parse_m3u8_formats PASSED [ 1%]\ntest/test_InfoExtractor.py::TestInfoExtractor::test_parse_mpd_formats PASSED [ 1%]\ntest/test_InfoExtractor.py::TestInfoExtractor::test_parse_xspf PASSED [ 1%]\ntest/test_InfoExtractor.py::TestInfoExtractor::test_response_with_expected_status_returns_content PASSED [ 2%]\ntest/test_InfoExtractor.py::TestInfoExtractor::test_search_json_ld_realworld PASSED [ 2%]\ntest/test_YoutubeDL.py::TestFormatSelection::test_audio_only_extractor_format_selection PASSED [ 2%]\ntest/test_YoutubeDL.py::TestFormatSelection::test_default_format_spec PASSED [ 2%]\ntest/test_YoutubeDL.py::TestFormatSelection::test_format_filtering PASSED [ 2%]\ntest/test_YoutubeDL.py::TestFormatSelection::test_format_not_available PASSED [ 2%]\ntest/test_YoutubeDL.py::TestFormatSelection::test_format_selection PASSED [ 3%]\ntest/test_YoutubeDL.py::TestFormatSelection::test_format_selection_audio PASSED [ 3%]\ntest/test_YoutubeDL.py::TestFormatSelection::test_format_selection_audio_exts PASSED [ 3%]\ntest/test_YoutubeDL.py::TestFormatSelection::test_format_selection_issue_10083 PASSED [ 3%]\ntest/test_YoutubeDL.py::TestFormatSelection::test_format_selection_string_ops PASSED [ 3%]\ntest/test_YoutubeDL.py::TestFormatSelection::test_format_selection_video PASSED [ 3%]\ntest/test_YoutubeDL.py::TestFormatSelection::test_invalid_format_specs PASSED [ 4%]\ntest/test_YoutubeDL.py::TestFormatSelection::test_prefer_free_formats PASSED [ 4%]\ntest/test_YoutubeDL.py::TestFormatSelection::test_youtube_format_selection PASSED [ 4%]\ntest/test_YoutubeDL.py::TestYoutubeDL::test_add_extra_info PASSED [ 4%]\ntest/test_YoutubeDL.py::TestYoutubeDL::test_add_headers_cookie PASSED [ 4%]\ntest/test_YoutubeDL.py::TestYoutubeDL::test_do_not_override_ie_key_in_url_transparent PASSED [ 4%]\ntest/test_YoutubeDL.py::TestYoutubeDL::test_format_note PASSED [ 5%]\ntest/test_YoutubeDL.py::TestYoutubeDL::test_header_cookies PASSED [ 5%]\ntest/test_YoutubeDL.py::TestYoutubeDL::test_ignoreerrors_for_playlist_with_url_transparent_iterable_entries PASSED [ 5%]\ntest/test_YoutubeDL.py::TestYoutubeDL::test_infojson_cookies PASSED [ 5%]\ntest/test_YoutubeDL.py::TestYoutubeDL::test_match_filter PASSED [ 5%]\ntest/test_YoutubeDL.py::TestYoutubeDL::test_playlist_items_selection PASSED [ 5%]\ntest/test_YoutubeDL.py::TestYoutubeDL::test_postprocessors PASSED [ 6%]\ntest/test_YoutubeDL.py::TestYoutubeDL::test_prepare_outtmpl_and_filename PASSED [ 6%]\ntest/test_YoutubeDL.py::TestYoutubeDL::test_subtitles PASSED [ 6%]\ntest/test_YoutubeDLCookieJar.py::TestYoutubeDLCookieJar::test_get_cookie_header PASSED [ 6%]\ntest/test_YoutubeDLCookieJar.py::TestYoutubeDLCookieJar::test_get_cookies_for_url 
PASSED [ 6%]\ntest/test_YoutubeDLCookieJar.py::TestYoutubeDLCookieJar::test_keep_session_cookies PASSED [ 6%]\ntest/test_YoutubeDLCookieJar.py::TestYoutubeDLCookieJar::test_malformed_cookies PASSED [ 6%]\ntest/test_YoutubeDLCookieJar.py::TestYoutubeDLCookieJar::test_strip_httponly_prefix PASSED [ 7%]\ntest/test_aes.py::TestAES::test_cbc_decrypt PASSED [ 7%]\ntest/test_aes.py::TestAES::test_cbc_encrypt PASSED [ 7%]\ntest/test_aes.py::TestAES::test_ctr_decrypt PASSED [ 7%]\ntest/test_aes.py::TestAES::test_ctr_encrypt PASSED [ 7%]\ntest/test_aes.py::TestAES::test_decrypt_text PASSED [ 7%]\ntest/test_aes.py::TestAES::test_ecb_decrypt PASSED [ 8%]\ntest/test_aes.py::TestAES::test_ecb_encrypt PASSED [ 8%]\ntest/test_aes.py::TestAES::test_encrypt PASSED [ 8%]\ntest/test_aes.py::TestAES::test_gcm_decrypt PASSED [ 8%]\ntest/test_aes.py::TestAES::test_key_expansion PASSED [ 8%]\ntest/test_aes.py::TestAES::test_pad_block PASSED [ 8%]\ntest/test_all_urls.py::TestAllURLsMatching::test_facebook_matching PASSED [ 9%]\ntest/test_all_urls.py::TestAllURLsMatching::test_keywords PASSED [ 9%]\ntest/test_all_urls.py::TestAllURLsMatching::test_no_duplicated_ie_names PASSED [ 9%]\ntest/test_all_urls.py::TestAllURLsMatching::test_no_duplicates PASSED [ 9%]\ntest/test_all_urls.py::TestAllURLsMatching::test_pbs PASSED [ 9%]\ntest/test_all_urls.py::TestAllURLsMatching::test_soundcloud_not_matching_sets PASSED [ 9%]\ntest/test_all_urls.py::TestAllURLsMatching::test_tumblr PASSED [ 10%]\ntest/test_all_urls.py::TestAllURLsMatching::test_vimeo_matching PASSED [ 10%]\ntest/test_all_urls.py::TestAllURLsMatching::test_youtube_channel_matching PASSED [ 10%]\ntest/test_all_urls.py::TestAllURLsMatching::test_youtube_feeds PASSED [ 10%]\ntest/test_all_urls.py::TestAllURLsMatching::test_youtube_matching PASSED [ 10%]\ntest/test_all_urls.py::TestAllURLsMatching::test_youtube_playlist_matching PASSED [ 10%]\ntest/test_all_urls.py::TestAllURLsMatching::test_youtube_search_matching PASSED [ 11%]\ntest/test_all_urls.py::TestAllURLsMatching::test_youtube_user_matching PASSED [ 11%]\ntest/test_cache.py::TestCache::test_cache PASSED [ 11%]\ntest/test_compat.py::TestCompat::test_compat_etree_fromstring PASSED [ 11%]\ntest/test_compat.py::TestCompat::test_compat_etree_fromstring_doctype PASSED [ 11%]\ntest/test_compat.py::TestCompat::test_compat_expanduser PASSED [ 11%]\ntest/test_compat.py::TestCompat::test_compat_passthrough PASSED [ 12%]\ntest/test_compat.py::TestCompat::test_compat_urllib_parse_unquote PASSED [ 12%]\ntest/test_compat.py::TestCompat::test_compat_urllib_parse_unquote_plus PASSED [ 12%]\ntest/test_compat.py::TestCompat::test_compat_urllib_parse_urlencode PASSED [ 12%]\ntest/test_compat.py::TestCompat::test_struct_unpack PASSED [ 12%]\ntest/test_config.py::TestConfig::test_config__ENVIRON_DEFAULTS_sanity PASSED [ 12%]\ntest/test_config.py::TestConfig::test_config_all_environ_values PASSED [ 13%]\ntest/test_config.py::TestConfig::test_config_default_expected_locations PASSED [ 13%]\ntest/test_config.py::TestConfig::test_config_default_grouping PASSED [ 13%]\ntest/test_config.py::TestConfig::test_config_early_exit_commandline PASSED [ 13%]\ntest/test_config.py::TestConfig::test_config_early_exit_files PASSED [ 13%]\ntest/test_config.py::TestConfig::test_config_override_commandline PASSED [ 13%]\ntest/test_config.py::TestConfig::test_config_override_files PASSED [ 13%]\ntest/test_cookies.py::TestCookies::test_chrome_cookie_decryptor_linux_derive_key PASSED [ 
14%]\ntest/test_cookies.py::TestCookies::test_chrome_cookie_decryptor_linux_v10 PASSED [ 14%]\ntest/test_cookies.py::TestCookies::test_chrome_cookie_decryptor_linux_v11 PASSED [ 14%]\ntest/test_cookies.py::TestCookies::test_chrome_cookie_decryptor_mac_derive_key PASSED [ 14%]\ntest/test_cookies.py::TestCookies::test_chrome_cookie_decryptor_mac_v10 PASSED [ 14%]\ntest/test_cookies.py::TestCookies::test_chrome_cookie_decryptor_windows_v10 PASSED [ 14%]\ntest/test_cookies.py::TestCookies::test_get_desktop_environment PASSED [ 15%]\ntest/test_cookies.py::TestCookies::test_pbkdf2_sha1 PASSED [ 15%]\ntest/test_cookies.py::TestCookies::test_safari_cookie_parsing PASSED [ 15%]\ntest/test_cookies.py::TestLenientSimpleCookie::test_lenient_parsing PASSED [ 15%]\ntest/test_cookies.py::TestLenientSimpleCookie::test_parsing PASSED [ 15%]\ntest/test_downloader_external.py::TestHttpieFD::test_make_cmd PASSED [ 15%]\ntest/test_downloader_external.py::TestAxelFD::test_make_cmd PASSED [ 16%]\ntest/test_downloader_external.py::TestWgetFD::test_make_cmd PASSED [ 16%]\ntest/test_downloader_external.py::TestCurlFD::test_make_cmd PASSED [ 16%]\ntest/test_downloader_external.py::TestAria2cFD::test_make_cmd PASSED [ 16%]\ntest/test_downloader_external.py::TestFFmpegFD::test_make_cmd SKIPPED [ 16%]\ntest/test_downloader_http.py::TestHttpFD::test_chunked FAILED [ 16%]\ntest/test_downloader_http.py::TestHttpFD::test_regular FAILED [ 17%]\ntest/test_execution.py::TestExecution::test_cmdline_umlauts PASSED [ 17%]\ntest/test_execution.py::TestExecution::test_import PASSED [ 17%]\ntest/test_execution.py::TestExecution::test_lazy_extractors PASSED [ 17%]\ntest/test_execution.py::TestExecution::test_main_exec PASSED [ 17%]\ntest/test_execution.py::TestExecution::test_module_exec PASSED [ 17%]\ntest/test_jsinterp.py::TestJSInterpreter::test_add PASSED [ 18%]\ntest/test_jsinterp.py::TestJSInterpreter::test_array_access PASSED [ 18%]\ntest/test_jsinterp.py::TestJSInterpreter::test_assignments PASSED [ 18%]\ntest/test_jsinterp.py::TestJSInterpreter::test_basic PASSED [ 18%]\ntest/test_jsinterp.py::TestJSInterpreter::test_bitwise_operators_overflow PASSED [ 18%]\ntest/test_jsinterp.py::TestJSInterpreter::test_bitwise_operators_typecast PASSED [ 18%]\ntest/test_jsinterp.py::TestJSInterpreter::test_builtins PASSED [ 19%]\ntest/test_jsinterp.py::TestJSInterpreter::test_calc PASSED [ 19%]\ntest/test_jsinterp.py::TestJSInterpreter::test_call PASSED [ 19%]\ntest/test_jsinterp.py::TestJSInterpreter::test_catch PASSED [ 19%]\ntest/test_jsinterp.py::TestJSInterpreter::test_char_code_at PASSED [ 19%]\ntest/test_jsinterp.py::TestJSInterpreter::test_comma PASSED [ 19%]\ntest/test_jsinterp.py::TestJSInterpreter::test_comments SKIPPED (Not...) 
[ 20%]\ntest/test_jsinterp.py::TestJSInterpreter::test_date PASSED [ 20%]\ntest/test_jsinterp.py::TestJSInterpreter::test_div PASSED [ 20%]\ntest/test_jsinterp.py::TestJSInterpreter::test_empty_return PASSED [ 20%]\ntest/test_jsinterp.py::TestJSInterpreter::test_exp PASSED [ 20%]\ntest/test_jsinterp.py::TestJSInterpreter::test_finally PASSED [ 20%]\ntest/test_jsinterp.py::TestJSInterpreter::test_for_loop PASSED [ 20%]\ntest/test_jsinterp.py::TestJSInterpreter::test_for_loop_break PASSED [ 21%]\ntest/test_jsinterp.py::TestJSInterpreter::test_for_loop_continue PASSED [ 21%]\ntest/test_jsinterp.py::TestJSInterpreter::test_for_loop_try PASSED [ 21%]\ntest/test_jsinterp.py::TestJSInterpreter::test_if PASSED [ 21%]\ntest/test_jsinterp.py::TestJSInterpreter::test_literal_list PASSED [ 21%]\ntest/test_jsinterp.py::TestJSInterpreter::test_mod PASSED [ 21%]\ntest/test_jsinterp.py::TestJSInterpreter::test_morespace PASSED [ 22%]\ntest/test_jsinterp.py::TestJSInterpreter::test_mul PASSED [ 22%]\ntest/test_jsinterp.py::TestJSInterpreter::test_negative PASSED [ 22%]\ntest/test_jsinterp.py::TestJSInterpreter::test_nested_try PASSED [ 22%]\ntest/test_jsinterp.py::TestJSInterpreter::test_null PASSED [ 22%]\ntest/test_jsinterp.py::TestJSInterpreter::test_object PASSED [ 22%]\ntest/test_jsinterp.py::TestJSInterpreter::test_operators PASSED [ 23%]\ntest/test_jsinterp.py::TestJSInterpreter::test_packed SKIPPED (Not i...) [ 23%]\ntest/test_jsinterp.py::TestJSInterpreter::test_parens PASSED [ 23%]\ntest/test_jsinterp.py::TestJSInterpreter::test_precedence PASSED [ 23%]\ntest/test_jsinterp.py::TestJSInterpreter::test_quotes PASSED [ 23%]\ntest/test_jsinterp.py::TestJSInterpreter::test_regex PASSED [ 23%]\ntest/test_jsinterp.py::TestJSInterpreter::test_replace SKIPPED (Not ...) 
[ 24%]\ntest/test_jsinterp.py::TestJSInterpreter::test_return_function PASSED [ 24%]\ntest/test_jsinterp.py::TestJSInterpreter::test_strange_chars PASSED [ 24%]\ntest/test_jsinterp.py::TestJSInterpreter::test_sub PASSED [ 24%]\ntest/test_jsinterp.py::TestJSInterpreter::test_switch PASSED [ 24%]\ntest/test_jsinterp.py::TestJSInterpreter::test_switch_default PASSED [ 24%]\ntest/test_jsinterp.py::TestJSInterpreter::test_try PASSED [ 25%]\ntest/test_jsinterp.py::TestJSInterpreter::test_undefined PASSED [ 25%]\ntest/test_jsinterp.py::TestJSInterpreter::test_void PASSED [ 25%]\ntest/test_netrc.py::TestNetRc::test_netrc_present PASSED [ 25%]\ntest/test_networking.py::TestHTTPRequestHandler::test_verify_cert[Urllib] PASSED [ 25%]\ntest/test_networking.py::TestHTTPRequestHandler::test_verify_cert[Requests] SKIPPED [ 25%]\ntest/test_networking.py::TestHTTPRequestHandler::test_ssl_error[Urllib] PASSED [ 26%]\ntest/test_networking.py::TestHTTPRequestHandler::test_ssl_error[Requests] SKIPPED [ 26%]\ntest/test_networking.py::TestHTTPRequestHandler::test_percent_encode[Urllib] PASSED [ 26%]\ntest/test_networking.py::TestHTTPRequestHandler::test_percent_encode[Requests] SKIPPED [ 26%]\ntest/test_networking.py::TestHTTPRequestHandler::test_remove_dot_segments[Urllib] PASSED [ 26%]\ntest/test_networking.py::TestHTTPRequestHandler::test_remove_dot_segments[Requests] SKIPPED [ 26%]\ntest/test_networking.py::TestHTTPRequestHandler::test_unicode_path_redirection[Urllib] PASSED [ 26%]\ntest/test_networking.py::TestHTTPRequestHandler::test_unicode_path_redirection[Requests] SKIPPED [ 27%]\ntest/test_networking.py::TestHTTPRequestHandler::test_raise_http_error[Urllib] PASSED [ 27%]\ntest/test_networking.py::TestHTTPRequestHandler::test_raise_http_error[Requests] SKIPPED [ 27%]\ntest/test_networking.py::TestHTTPRequestHandler::test_response_url[Urllib] PASSED [ 27%]\ntest/test_networking.py::TestHTTPRequestHandler::test_response_url[Requests] SKIPPED [ 27%]\ntest/test_networking.py::TestHTTPRequestHandler::test_redirect[Urllib] PASSED [ 27%]\ntest/test_networking.py::TestHTTPRequestHandler::test_redirect[Requests] SKIPPED [ 28%]\ntest/test_networking.py::TestHTTPRequestHandler::test_request_cookie_header[Urllib] PASSED [ 28%]\ntest/test_networking.py::TestHTTPRequestHandler::test_request_cookie_header[Requests] SKIPPED [ 28%]\ntest/test_networking.py::TestHTTPRequestHandler::test_redirect_loop[Urllib] PASSED [ 28%]\ntest/test_networking.py::TestHTTPRequestHandler::test_redirect_loop[Requests] SKIPPED [ 28%]\ntest/test_networking.py::TestHTTPRequestHandler::test_incompleteread[Urllib] PASSED [ 28%]\ntest/test_networking.py::TestHTTPRequestHandler::test_incompleteread[Requests] SKIPPED [ 29%]\ntest/test_networking.py::TestHTTPRequestHandler::test_cookies[Urllib] PASSED [ 29%]\ntest/test_networking.py::TestHTTPRequestHandler::test_cookies[Requests] SKIPPED [ 29%]\ntest/test_networking.py::TestHTTPRequestHandler::test_headers[Urllib] PASSED [ 29%]\ntest/test_networking.py::TestHTTPRequestHandler::test_headers[Requests] SKIPPED [ 29%]\ntest/test_networking.py::TestHTTPRequestHandler::test_timeout[Urllib] PASSED [ 29%]\ntest/test_networking.py::TestHTTPRequestHandler::test_timeout[Requests] SKIPPED [ 30%]\nTraceback (most recent call last):\n File \"/opt/hostedtoolcache/Python/3.11.6/x64/lib/python3.11/socketserver.py\", line 691, in process_request_thread\n self.finish_request(request, client_address)\n File \"/opt/hostedtoolcache/Python/3.11.6/x64/lib/python3.11/socketserver.py\", line 361, in finish_request\n 
self.RequestHandlerClass(request, client_address, self)\ntest/test_networking.py::TestHTTPRequestHandler::test_source_address[Urllib] PASSED [ 30%]\n File \"/opt/hostedtoolcache/Python/3.11.6/x64/lib/python3.11/socketserver.py\", line 755, in __init__\n self.handle()\n File \"/opt/hostedtoolcache/Python/3.11.6/x64/lib/python3.11/http/server.py\", line 436, in handle\n self.handle_one_request()\n File \"/opt/hostedtoolcache/Python/3.11.6/x64/lib/python3.11/http/server.py\", line 425, in handle_one_request\n self.wfile.flush() #actually send the response if not already done.\n ^^^^^^^^^^^^^^^^^^\nValueError: I/O operation on closed file.\n----------------------------------------\ntest/test_networking.py::TestHTTPRequestHandler::test_source_address[Requests] SKIPPED [ 30%]\ntest/test_networking.py::TestHTTPRequestHandler::test_gzip_trailing_garbage[Urllib] PASSED [ 30%]\ntest/test_networking.py::TestHTTPRequestHandler::test_gzip_trailing_garbage[Requests] SKIPPED [ 30%]\ntest/test_networking.py::TestHTTPRequestHandler::test_brotli[Urllib] SKIPPED [ 30%]\ntest/test_networking.py::TestHTTPRequestHandler::test_brotli[Requests] SKIPPED [ 31%]\ntest/test_networking.py::TestHTTPRequestHandler::test_deflate[Urllib] PASSED [ 31%]\ntest/test_networking.py::TestHTTPRequestHandler::test_deflate[Requests] SKIPPED [ 31%]\ntest/test_networking.py::TestHTTPRequestHandler::test_gzip[Urllib] PASSED [ 31%]\ntest/test_networking.py::TestHTTPRequestHandler::test_gzip[Requests] SKIPPED [ 31%]\ntest/test_networking.py::TestHTTPRequestHandler::test_multiple_encodings[Urllib] PASSED [ 31%]\ntest/test_networking.py::TestHTTPRequestHandler::test_multiple_encodings[Requests] SKIPPED [ 32%]\ntest/test_networking.py::TestHTTPRequestHandler::test_unsupported_encoding[Urllib] PASSED [ 32%]\ntest/test_networking.py::TestHTTPRequestHandler::test_unsupported_encoding[Requests] SKIPPED [ 32%]\ntest/test_networking.py::TestHTTPRequestHandler::test_read[Urllib] PASSED [ 32%]\ntest/test_networking.py::TestHTTPRequestHandler::test_read[Requests] SKIPPED [ 32%]\ntest/test_networking.py::TestHTTPProxy::test_http_proxy[Urllib] PASSED [ 32%]\ntest/test_networking.py::TestHTTPProxy::test_http_proxy[Requests] SKIPPED [ 33%]\ntest/test_networking.py::TestHTTPProxy::test_noproxy[Urllib] PASSED [ 33%]\ntest/test_networking.py::TestHTTPProxy::test_noproxy[Requests] SKIPPED [ 33%]\ntest/test_networking.py::TestHTTPProxy::test_allproxy[Urllib] PASSED [ 33%]\ntest/test_networking.py::TestHTTPProxy::test_allproxy[Requests] SKIPPED [ 33%]\ntest/test_networking.py::TestHTTPProxy::test_http_proxy_with_idn[Urllib] PASSED [ 33%]\ntest/test_networking.py::TestHTTPProxy::test_http_proxy_with_idn[Requests] SKIPPED [ 33%]\ntest/test_networking.py::TestClientCertificate::test_certificate_combined_nopass[Urllib] PASSED [ 34%]\ntest/test_networking.py::TestClientCertificate::test_certificate_combined_nopass[Requests] SKIPPED [ 34%]\ntest/test_networking.py::TestClientCertificate::test_certificate_nocombined_nopass[Urllib] PASSED [ 34%]\ntest/test_networking.py::TestClientCertificate::test_certificate_nocombined_nopass[Requests] SKIPPED [ 34%]\ntest/test_networking.py::TestClientCertificate::test_certificate_combined_pass[Urllib] PASSED [ 34%]\ntest/test_networking.py::TestClientCertificate::test_certificate_combined_pass[Requests] SKIPPED [ 34%]\ntest/test_networking.py::TestClientCertificate::test_certificate_nocombined_pass[Urllib] PASSED [ 35%]\ntest/test_networking.py::TestClientCertificate::test_certificate_nocombined_pass[Requests] SKIPPED [ 
35%]\ntest/test_networking.py::TestUrllibRequestHandler::test_file_urls[Urllib] PASSED [ 35%]\ntest/test_networking.py::TestUrllibRequestHandler::test_http_error_returns_content[Urllib] PASSED [ 35%]\ntest/test_networking.py::TestUrllibRequestHandler::test_verify_cert_error_text[Urllib] PASSED [ 35%]\ntest/test_networking.py::TestUrllibRequestHandler::test_httplib_validation_errors[req0-method can't contain control characters-<lambda>-Urllib] PASSED [ 35%]\ntest/test_networking.py::TestUrllibRequestHandler::test_httplib_validation_errors[req1-URL can't contain control characters-<lambda>-Urllib] PASSED [ 36%]\ntest/test_networking.py::TestUrllibRequestHandler::test_httplib_validation_errors[req2-Invalid header name-None-Urllib] PASSED [ 36%]\ntest/test_networking.py::TestRequestsRequestHandler::test_request_error_mapping[Requests-<lambda>-TransportError0] SKIPPED [ 36%]\ntest/test_networking.py::TestRequestsRequestHandler::test_request_error_mapping[Requests-<lambda>-TransportError1] SKIPPED [ 36%]\ntest/test_networking.py::TestRequestsRequestHandler::test_request_error_mapping[Requests-<lambda>-TransportError2] SKIPPED [ 36%]\ntest/test_networking.py::TestRequestsRequestHandler::test_request_error_mapping[Requests-<lambda>-TransportError3] SKIPPED [ 36%]\ntest/test_networking.py::TestRequestsRequestHandler::test_request_error_mapping[Requests-<lambda>-ProxyError] SKIPPED [ 37%]\ntest/test_networking.py::TestRequestsRequestHandler::test_request_error_mapping[Requests-<lambda>-CertificateVerifyError] SKIPPED [ 37%]\ntest/test_networking.py::TestRequestsRequestHandler::test_request_error_mapping[Requests-<lambda>-SSLError] SKIPPED [ 37%]\ntest/test_networking.py::TestRequestsRequestHandler::test_request_error_mapping[Requests-<lambda>-RequestError0] SKIPPED [ 37%]\ntest/test_networking.py::TestRequestsRequestHandler::test_request_error_mapping[Requests-<lambda>-RequestError1] SKIPPED [ 37%]\ntest/test_networking.py::TestRequestsRequestHandler::test_request_error_mapping[Requests-<lambda>-TransportError4] SKIPPED [ 37%]\ntest/test_networking.py::TestRequestsRequestHandler::test_request_error_mapping[Requests-<lambda>-RequestError2] SKIPPED [ 38%]\ntest/test_networking.py::TestRequestsRequestHandler::test_response_error_mapping[Requests-<lambda>-SSLError-None] SKIPPED [ 38%]\ntest/test_networking.py::TestRequestsRequestHandler::test_response_error_mapping[Requests-<lambda>-TransportError-None0] SKIPPED [ 38%]\ntest/test_networking.py::TestRequestsRequestHandler::test_response_error_mapping[Requests-<lambda>-TransportError-None1] SKIPPED [ 38%]\ntest/test_networking.py::TestRequestsRequestHandler::test_response_error_mapping[Requests-<lambda>-TransportError-None2] SKIPPED [ 38%]\ntest/test_networking.py::TestRequestsRequestHandler::test_response_error_mapping[Requests-<lambda>-TransportError-None3] SKIPPED [ 38%]\ntest/test_networking.py::TestRequestsRequestHandler::test_response_error_mapping[Requests-<lambda>-TransportError-None4] SKIPPED [ 39%]\ntest/test_networking.py::TestRequestsRequestHandler::test_response_error_mapping[Requests-<lambda>-IncompleteRead-3 bytes read, 4 more expected] SKIPPED [ 39%]\ntest/test_networking.py::TestRequestsRequestHandler::test_response_error_mapping[Requests-<lambda>-IncompleteRead-3 bytes read, 5 more expected] SKIPPED [ 39%]\ntest/test_networking.py::TestRequestHandlerValidation::test_url_scheme[Urllib-http-False-handler_kwargs0] PASSED [ 39%]\ntest/test_networking.py::TestRequestHandlerValidation::test_url_scheme[Urllib-https-False-handler_kwargs1] PASSED 
[ 39%]\ntest/test_networking.py::TestRequestHandlerValidation::test_url_scheme[Urllib-data-False-handler_kwargs2] PASSED [ 39%]\ntest/test_networking.py::TestRequestHandlerValidation::test_url_scheme[Urllib-ftp-False-handler_kwargs3] PASSED [ 40%]\ntest/test_networking.py::TestRequestHandlerValidation::test_url_scheme[Urllib-file-UnsupportedRequest-handler_kwargs4] PASSED [ 40%]\ntest/test_networking.py::TestRequestHandlerValidation::test_url_scheme[Urllib-file-False-handler_kwargs5] PASSED [ 40%]\ntest/test_networking.py::TestRequestHandlerValidation::test_url_scheme[Requests-http-False-handler_kwargs6] SKIPPED [ 40%]\ntest/test_networking.py::TestRequestHandlerValidation::test_url_scheme[Requests-https-False-handler_kwargs7] SKIPPED [ 40%]\ntest/test_networking.py::TestRequestHandlerValidation::test_url_scheme[NoCheckRH-http-False-handler_kwargs8] PASSED [ 40%]\ntest/test_networking.py::TestRequestHandlerValidation::test_url_scheme[ValidationRH-http-UnsupportedRequest-handler_kwargs9] PASSED [ 40%]\ntest/test_networking.py::TestRequestHandlerValidation::test_no_proxy[Urllib-False] PASSED [ 41%]\ntest/test_networking.py::TestRequestHandlerValidation::test_no_proxy[Requests-False] SKIPPED [ 41%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_key[Urllib-all-False] PASSED [ 41%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_key[Urllib-unrelated-False] PASSED [ 41%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_key[Requests-all-False] SKIPPED [ 41%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_key[Requests-unrelated-False] SKIPPED [ 41%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_key[NoCheckRH-all-False] PASSED [ 42%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_key[HTTPSupportedRH-all-UnsupportedRequest] PASSED [ 42%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_key[HTTPSupportedRH-no-UnsupportedRequest] PASSED [ 42%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[Urllib-http-False] PASSED [ 42%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[Urllib-https-UnsupportedRequest] PASSED [ 42%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[Urllib-socks4-False] PASSED [ 42%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[Urllib-socks4a-False] PASSED [ 43%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[Urllib-socks5-False] PASSED [ 43%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[Urllib-socks5h-False] PASSED [ 43%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[Urllib-socks-UnsupportedRequest] PASSED [ 43%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[Requests-http-False] SKIPPED [ 43%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[Requests-https-False] SKIPPED [ 43%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[Requests-socks4-False] SKIPPED [ 44%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[Requests-socks4a-False] SKIPPED [ 44%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[Requests-socks5-False] SKIPPED [ 44%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[Requests-socks5h-False] SKIPPED [ 44%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[NoCheckRH-http-False] 
PASSED [ 44%]\ntest/test_networking.py::TestRequestHandlerValidation::test_proxy_scheme[HTTPSupportedRH-http-UnsupportedRequest] PASSED [ 44%]\ntest/test_networking.py::TestRequestHandlerValidation::test_empty_proxy[Urllib] PASSED [ 45%]\ntest/test_networking.py::TestRequestHandlerValidation::test_empty_proxy[HTTPSupportedRH] PASSED [ 45%]\ntest/test_networking.py::TestRequestHandlerValidation::test_empty_proxy[Requests] SKIPPED [ 45%]\ntest/test_networking.py::TestRequestHandlerValidation::test_invalid_proxy_url[Urllib-//example.com] PASSED [ 45%]\ntest/test_networking.py::TestRequestHandlerValidation::test_invalid_proxy_url[Urllib-example.com] PASSED [ 45%]\ntest/test_networking.py::TestRequestHandlerValidation::test_invalid_proxy_url[Urllib-127.0.0.1] PASSED [ 45%]\ntest/test_networking.py::TestRequestHandlerValidation::test_invalid_proxy_url[Urllib-/a/b/c] PASSED [ 46%]\ntest/test_networking.py::TestRequestHandlerValidation::test_invalid_proxy_url[Requests-//example.com] SKIPPED [ 46%]\ntest/test_networking.py::TestRequestHandlerValidation::test_invalid_proxy_url[Requests-example.com] SKIPPED [ 46%]\ntest/test_networking.py::TestRequestHandlerValidation::test_invalid_proxy_url[Requests-127.0.0.1] SKIPPED [ 46%]\ntest/test_networking.py::TestRequestHandlerValidation::test_invalid_proxy_url[Requests-/a/b/c] SKIPPED [ 46%]\ntest/test_networking.py::TestRequestHandlerValidation::test_extension[Urllib-extensions0-AssertionError] PASSED [ 46%]\ntest/test_networking.py::TestRequestHandlerValidation::test_extension[Urllib-extensions1-False] PASSED [ 46%]\ntest/test_networking.py::TestRequestHandlerValidation::test_extension[Urllib-extensions2-AssertionError] PASSED [ 47%]\ntest/test_networking.py::TestRequestHandlerValidation::test_extension[Urllib-extensions3-False] PASSED [ 47%]\ntest/test_networking.py::TestRequestHandlerValidation::test_extension[Urllib-extensions4-AssertionError] PASSED [ 47%]\ntest/test_networking.py::TestRequestHandlerValidation::test_extension[Urllib-extensions5-UnsupportedRequest] PASSED [ 47%]\ntest/test_networking.py::TestRequestHandlerValidation::test_extension[Requests-extensions6-AssertionError] SKIPPED [ 47%]\ntest/test_networking.py::TestRequestHandlerValidation::test_extension[Requests-extensions7-False] SKIPPED [ 47%]\ntest/test_networking.py::TestRequestHandlerValidation::test_extension[Requests-extensions8-False] SKIPPED [ 48%]\ntest/test_networking.py::TestRequestHandlerValidation::test_extension[Requests-extensions9-AssertionError] SKIPPED [ 48%]\ntest/test_networking.py::TestRequestHandlerValidation::test_extension[Requests-extensions10-UnsupportedRequest] SKIPPED [ 48%]\ntest/test_networking.py::TestRequestHandlerValidation::test_extension[NoCheckRH-extensions11-False] PASSED [ 48%]\ntest/test_networking.py::TestRequestHandlerValidation::test_extension[NoCheckRH-extensions12-False] PASSED [ 48%]\ntest/test_networking.py::TestRequestHandlerValidation::test_invalid_request_type PASSED [ 48%]\ntest/test_networking.py::TestRequestDirector::test_handler_operations PASSED [ 49%]\ntest/test_networking.py::TestRequestDirector::test_send PASSED [ 49%]\ntest/test_networking.py::TestRequestDirector::test_unsupported_handlers PASSED [ 49%]\ntest/test_networking.py::TestRequestDirector::test_unexpected_error PASSED [ 49%]\ntest/test_networking.py::TestRequestDirector::test_preference PASSED [ 49%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_compat_opener PASSED [ 
49%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_proxy[http://127.0.0.1:8080-expected0] PASSED [ 50%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_proxy[-expected1] PASSED [ 50%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_proxy[None-expected2] PASSED [ 50%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_compat_request PASSED [ 50%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_extract_basic_auth PASSED [ 50%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_sanitize_url PASSED [ 50%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_file_urls_error PASSED [ 51%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_legacy_server_connect_error PASSED [ 51%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_clean_proxy[http-__noproxy__-None] PASSED [ 51%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_clean_proxy[no-127.0.0.1,foo.bar-127.0.0.1,foo.bar] PASSED [ 51%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_clean_proxy[https-example.com-http://example.com] PASSED [ 51%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_clean_proxy[https-//example.com-http://example.com] PASSED [ 51%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_clean_proxy[https-socks5://example.com-socks5h://example.com] PASSED [ 52%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_clean_proxy[http-socks://example.com-socks4://example.com] PASSED [ 52%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_clean_proxy[http-socks4://example.com-socks4://example.com] PASSED [ 52%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_clean_proxy[unrelated-/bad/proxy-/bad/proxy] PASSED [ 52%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_clean_proxy_header PASSED [ 52%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_clean_header PASSED [ 52%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_build_handler_params PASSED [ 53%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_client_certificate[ydl_params0] PASSED [ 53%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_client_certificate[ydl_params1] PASSED [ 53%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_client_certificate[ydl_params2] PASSED [ 53%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_client_certificate[ydl_params3] PASSED [ 53%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_urllib_file_urls PASSED [ 53%]\ntest/test_networking.py::TestYoutubeDLNetworking::test_compat_opt_prefer_urllib PASSED [ 53%]\ntest/test_networking.py::TestRequest::test_query PASSED [ 54%]\ntest/test_networking.py::TestRequest::test_method PASSED [ 54%]\ntest/test_networking.py::TestRequest::test_request_helpers PASSED [ 54%]\ntest/test_networking.py::TestRequest::test_headers PASSED [ 54%]\ntest/test_networking.py::TestRequest::test_data_type PASSED [ 54%]\ntest/test_networking.py::TestRequest::test_content_length_header PASSED [ 54%]\ntest/test_networking.py::TestRequest::test_content_type_header PASSED [ 55%]\ntest/test_networking.py::TestRequest::test_update_req PASSED [ 55%]\ntest/test_networking.py::TestRequest::test_proxies PASSED [ 55%]\ntest/test_networking.py::TestRequest::test_extensions PASSED [ 55%]\ntest/test_networking.py::TestRequest::test_copy PASSED [ 55%]\ntest/test_networking.py::TestRequest::test_url PASSED [ 55%]\ntest/test_networking.py::TestResponse::test_reason[custom-200-custom] PASSED [ 56%]\ntest/test_networking.py::TestResponse::test_reason[None-404-Not Found] PASSED [ 
56%]\ntest/test_networking.py::TestResponse::test_reason[-403-Forbidden] PASSED [ 56%]\ntest/test_networking.py::TestResponse::test_reason[None-999-None] PASSED [ 56%]\ntest/test_networking.py::TestResponse::test_headers PASSED [ 56%]\ntest/test_networking.py::TestResponse::test_get_header PASSED [ 56%]\ntest/test_networking.py::TestResponse::test_compat PASSED [ 57%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_select_proxy PASSED [ 57%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_make_socks_proxy_opts[socks5h://example.com-expected0] PASSED [ 57%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_make_socks_proxy_opts[***example.com:5555-expected1] PASSED [ 57%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_make_socks_proxy_opts[***127.0.0.1:1080-expected2] PASSED [ 57%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_make_socks_proxy_opts[socks4a://:pa%[email protected]] PASSED [ 57%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_make_socks_proxy_unknown PASSED [ 58%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_load_certifi SKIPPED [ 58%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_get_redirect_method[GET-303-GET] PASSED [ 58%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_get_redirect_method[HEAD-303-HEAD] PASSED [ 58%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_get_redirect_method[PUT-303-GET] PASSED [ 58%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_get_redirect_method[POST-301-GET] PASSED [ 58%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_get_redirect_method[HEAD-301-HEAD] PASSED [ 59%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_get_redirect_method[POST-302-GET] PASSED [ 59%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_get_redirect_method[HEAD-302-HEAD] PASSED [ 59%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_get_redirect_method[PUT-302-PUT] PASSED [ 59%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_get_redirect_method[POST-308-POST] PASSED [ 59%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_get_redirect_method[POST-307-POST] PASSED [ 59%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_get_redirect_method[HEAD-308-HEAD] PASSED [ 60%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_get_redirect_method[HEAD-307-HEAD] PASSED [ 60%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_add_accept_encoding_header[headers0-supported_encodings0-expected0] PASSED [ 60%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_add_accept_encoding_header[headers1-supported_encodings1-expected1] PASSED [ 60%]\ntest/test_networking_utils.py::TestNetworkingUtils::test_add_accept_encoding_header[headers2-supported_encodings2-expected2] PASSED [ 60%]\ntest/test_networking_utils.py::TestInstanceStoreMixin::test_mixin PASSED [ 60%]\ntest/test_networking_utils.py::TestNetworkingExceptions::test_http_error[HTTPError] PASSED [ 60%]\ntest/test_networking_utils.py::TestNetworkingExceptions::test_http_error[<lambda>] PASSED [ 61%]\ntest/test_networking_utils.py::TestNetworkingExceptions::test_redirect_http_error[HTTPError] PASSED [ 61%]\ntest/test_networking_utils.py::TestNetworkingExceptions::test_redirect_http_error[<lambda>] PASSED [ 61%]\ntest/test_networking_utils.py::TestNetworkingExceptions::test_compat_http_error PASSED [ 61%]\ntest/test_networking_utils.py::TestNetworkingExceptions::test_compat_http_error_autoclose PASSED [ 
61%]\ntest/test_networking_utils.py::TestNetworkingExceptions::test_incomplete_read_error PASSED [ 61%]\ntest/test_plugins.py::TestPlugins::test_directories_containing_plugins PASSED [ 62%]\ntest/test_plugins.py::TestPlugins::test_extractor_classes PASSED [ 62%]\ntest/test_plugins.py::TestPlugins::test_importing_zipped_module PASSED [ 62%]\ntest/test_plugins.py::TestPlugins::test_postprocessor_classes PASSED [ 62%]\ntest/test_postprocessors.py::TestMetadataFromField::test_field_to_template PASSED [ 62%]\ntest/test_postprocessors.py::TestMetadataFromField::test_format_to_regex PASSED [ 62%]\ntest/test_postprocessors.py::TestMetadataFromField::test_metadatafromfield PASSED [ 63%]\ntest/test_postprocessors.py::TestConvertThumbnail::test_escaping PASSED [ 63%]\ntest/test_postprocessors.py::TestExec::test_parse_cmd PASSED [ 63%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_make_concat_opts_CommonCase PASSED [ 63%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_make_concat_opts_NoZeroDurationChunkAtVideoEnd PASSED [ 63%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_make_concat_opts_NoZeroDurationChunkAtVideoStart PASSED [ 63%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_quote_for_concat_QuotesAtEnd PASSED [ 64%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_quote_for_concat_QuotesAtStart PASSED [ 64%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_quote_for_concat_RunsOfQuotes PASSED [ 64%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_CanGetThroughUnaltered PASSED [ 64%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChapterWithAdjacentCuts PASSED [ 64%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChapterWithAdjacentSponsors PASSED [ 64%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChapterWithCutHidingSponsor PASSED [ 65%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChapterWithCuts PASSED [ 65%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChapterWithOverlappingCuts PASSED [ 65%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChapterWithOverlappingSponsors PASSED [ 65%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingCuts PASSED [ 65%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingSponsors PASSED [ 65%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingSponsorsAndCuts PASSED [ 66%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChapterWithSponsorCutInTheMiddle PASSED [ 66%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChapterWithSponsors PASSED [ 66%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChapterWithSponsorsAndCuts PASSED [ 66%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChaptersAfterLastCut PASSED [ 66%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_ChaptersAfterLastSponsor PASSED [ 66%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_CutCoincidesWithChapters PASSED [ 
66%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_CutEndsAtChapterEnd PASSED [ 67%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_CutOverlapsMultipleChapters PASSED [ 67%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_CutStartsAtChapterStart PASSED [ 67%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_CutsAtVideoBoundaries PASSED [ 67%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_CutsOverlapChaptersAtVideoBoundaries PASSED [ 67%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_CutsWithinSomeChaptersAndOverlappingOthers PASSED [ 67%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_EverythingCut PASSED [ 68%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_EverythingSponsored PASSED [ 68%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_OverlappingSponsorsDifferentTitlesAfterCut PASSED [ 68%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_SingleTinyChapterIsPreserved PASSED [ 68%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_SmallestSponsorInTheOverlapGetsNamed PASSED [ 68%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_SponsorBlockChapters PASSED [ 68%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_SponsorCoincidesWithChapters PASSED [ 69%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_SponsorEndsAtChapterEnd PASSED [ 69%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_SponsorOverlapsMultipleChapters PASSED [ 69%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_SponsorStartsAtChapterStart PASSED [ 69%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_SponsorsAtVideoBoundaries PASSED [ 69%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_SponsorsNoLongerOverlapAfterCut PASSED [ 69%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_SponsorsOverlapChaptersAtVideoBoundaries PASSED [ 70%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_SponsorsStillOverlapAfterCut PASSED [ 70%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_SponsorsWithinSomeChaptersAndOverlappingOthers PASSED [ 70%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_TinyChapterAtTheStartPrependedToTheNext PASSED [ 70%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_TinyChaptersInTheOriginalArePreserved PASSED [ 70%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_TinyChaptersResultingFromCutsAreIgnored PASSED [ 70%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_TinyChaptersResultingFromSponsorOverlapAreIgnored PASSED [ 71%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_TinySponsorsAreIgnored PASSED [ 
71%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_TinySponsorsOverlapsAreIgnored PASSED [ 71%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_TinySponsorsPrependedToTheNextSponsor PASSED [ 71%]\ntest/test_postprocessors.py::TestModifyChaptersPP::test_remove_marked_arrange_sponsors_UniqueNamesForOverlappingSponsors PASSED [ 71%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4_no_auth[Urllib-http] PASSED [ 71%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4_no_auth[Requests-http] SKIPPED [ 72%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4_auth[Urllib-http] PASSED [ 72%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4_auth[Requests-http] SKIPPED [ 72%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4a_ipv4_target[Urllib-http] PASSED [ 72%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4a_ipv4_target[Requests-http] SKIPPED [ 72%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4a_domain_target[Urllib-http] PASSED [ 72%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4a_domain_target[Requests-http] SKIPPED [ 73%]\ntest/test_socks.py::TestSocks4Proxy::test_ipv4_client_source_address[Urllib-http] PASSED [ 73%]\ntest/test_socks.py::TestSocks4Proxy::test_ipv4_client_source_address[Requests-http] SKIPPED [ 73%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4_errors[91-Urllib-http] PASSED [ 73%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4_errors[91-Requests-http] SKIPPED [ 73%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4_errors[92-Urllib-http] PASSED [ 73%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4_errors[92-Requests-http] SKIPPED [ 73%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4_errors[93-Urllib-http] PASSED [ 74%]\ntest/test_socks.py::TestSocks4Proxy::test_socks4_errors[93-Requests-http] SKIPPED [ 74%]\ntest/test_socks.py::TestSocks4Proxy::test_ipv6_socks4_proxy[Urllib-http] PASSED [ 74%]\ntest/test_socks.py::TestSocks4Proxy::test_ipv6_socks4_proxy[Requests-http] SKIPPED [ 74%]\ntest/test_socks.py::TestSocks4Proxy::test_timeout[Urllib-http] PASSED [ 74%]\ntest/test_socks.py::TestSocks4Proxy::test_timeout[Requests-http] SKIPPED [ 74%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_no_auth[Urllib-http] PASSED [ 75%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_no_auth[Requests-http] SKIPPED [ 75%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_user_pass[Urllib-http] PASSED [ 75%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_user_pass[Requests-http] SKIPPED [ 75%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_ipv4_target[Urllib-http] PASSED [ 75%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_ipv4_target[Requests-http] SKIPPED [ 75%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_domain_target[Urllib-http] PASSED [ 76%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_domain_target[Requests-http] SKIPPED [ 76%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5h_domain_target[Urllib-http] PASSED [ 76%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5h_domain_target[Requests-http] SKIPPED [ 76%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5h_ip_target[Urllib-http] PASSED [ 76%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5h_ip_target[Requests-http] SKIPPED [ 76%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_ipv6_destination[Urllib-http] PASSED [ 77%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_ipv6_destination[Requests-http] SKIPPED [ 77%]\ntest/test_socks.py::TestSocks5Proxy::test_ipv6_socks5_proxy[Urllib-http] PASSED [ 
77%]\ntest/test_socks.py::TestSocks5Proxy::test_ipv6_socks5_proxy[Requests-http] SKIPPED [ 77%]\ntest/test_socks.py::TestSocks5Proxy::test_ipv4_client_source_address[Urllib-http] PASSED [ 77%]\ntest/test_socks.py::TestSocks5Proxy::test_ipv4_client_source_address[Requests-http] SKIPPED [ 77%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[1-Urllib-http] PASSED [ 78%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[1-Requests-http] SKIPPED [ 78%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[2-Urllib-http] PASSED [ 78%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[2-Requests-http] SKIPPED [ 78%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[3-Urllib-http] PASSED [ 78%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[3-Requests-http] SKIPPED [ 78%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[4-Urllib-http] PASSED [ 79%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[4-Requests-http] SKIPPED [ 79%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[5-Urllib-http] PASSED [ 79%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[5-Requests-http] SKIPPED [ 79%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[6-Urllib-http] PASSED [ 79%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[6-Requests-http] SKIPPED [ 79%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[7-Urllib-http] PASSED [ 80%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[7-Requests-http] SKIPPED [ 80%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[8-Urllib-http] PASSED [ 80%]\ntest/test_socks.py::TestSocks5Proxy::test_socks5_errors[8-Requests-http] SKIPPED [ 80%]\ntest/test_socks.py::TestSocks5Proxy::test_timeout[Urllib-http] PASSED [ 80%]\ntest/test_utils.py::TestUtil::test_LazyList PASSED [ 80%]\ntest/test_utils.py::TestUtil::test_LazyList_laziness PASSED [ 80%]\ntest/test_utils.py::TestUtil::test_Popen_windows_escaping SKIPPED (O...) 
[ 81%]\ntest/test_utils.py::TestUtil::test_age_restricted PASSED [ 81%]\ntest/test_utils.py::TestUtil::test_args_to_str PASSED [ 81%]\ntest/test_utils.py::TestUtil::test_base_url PASSED [ 81%]\ntest/test_utils.py::TestUtil::test_caesar PASSED [ 81%]\ntest/test_utils.py::TestUtil::test_clean_html PASSED [ 81%]\ntest/test_utils.py::TestUtil::test_clean_podcast_url PASSED [ 82%]\ntest/test_utils.py::TestUtil::test_cli_bool_option PASSED [ 82%]\ntest/test_utils.py::TestUtil::test_cli_option PASSED [ 82%]\ntest/test_utils.py::TestUtil::test_cli_valueless_option PASSED [ 82%]\ntest/test_utils.py::TestUtil::test_date_from_str PASSED [ 82%]\ntest/test_utils.py::TestUtil::test_daterange PASSED [ 82%]\ntest/test_utils.py::TestUtil::test_datetime_from_str PASSED [ 83%]\ntest/test_utils.py::TestUtil::test_detect_exe_version PASSED [ 83%]\ntest/test_utils.py::TestUtil::test_determine_ext PASSED [ 83%]\ntest/test_utils.py::TestUtil::test_determine_file_encoding PASSED [ 83%]\ntest/test_utils.py::TestUtil::test_dfxp2srt PASSED [ 83%]\ntest/test_utils.py::TestUtil::test_dict_get PASSED [ 83%]\ntest/test_utils.py::TestUtil::test_encode_base_n PASSED [ 84%]\ntest/test_utils.py::TestUtil::test_encode_compat_str PASSED [ 84%]\ntest/test_utils.py::TestUtil::test_escape_rfc3986 PASSED [ 84%]\ntest/test_utils.py::TestUtil::test_expand_path PASSED [ 84%]\ntest/test_utils.py::TestUtil::test_extract_attributes PASSED [ 84%]\ntest/test_utils.py::TestUtil::test_extract_basic_auth PASSED [ 84%]\ntest/test_utils.py::TestUtil::test_find_xpath_attr PASSED [ 85%]\ntest/test_utils.py::TestUtil::test_fix_xml_ampersands PASSED [ 85%]\ntest/test_utils.py::TestUtil::test_float_or_none PASSED [ 85%]\ntest/test_utils.py::TestUtil::test_format_bytes PASSED [ 85%]\ntest/test_utils.py::TestUtil::test_get_compatible_ext PASSED [ 85%]\ntest/test_utils.py::TestUtil::test_get_element_by_attribute PASSED [ 85%]\ntest/test_utils.py::TestUtil::test_get_element_by_class PASSED [ 86%]\ntest/test_utils.py::TestUtil::test_get_element_html_by_attribute PASSED [ 86%]\ntest/test_utils.py::TestUtil::test_get_element_html_by_class PASSED [ 86%]\ntest/test_utils.py::TestUtil::test_get_element_text_and_html_by_tag PASSED [ 86%]\ntest/test_utils.py::TestUtil::test_get_elements_by_attribute PASSED [ 86%]\ntest/test_utils.py::TestUtil::test_get_elements_by_class PASSED [ 86%]\ntest/test_utils.py::TestUtil::test_get_elements_html_by_attribute PASSED [ 86%]\ntest/test_utils.py::TestUtil::test_get_elements_html_by_class PASSED [ 87%]\ntest/test_utils.py::TestUtil::test_get_elements_text_and_html_by_attribute PASSED [ 87%]\ntest/test_utils.py::TestUtil::test_hide_login_info PASSED [ 87%]\ntest/test_utils.py::TestUtil::test_http_header_dict PASSED [ 87%]\ntest/test_utils.py::TestUtil::test_int_or_none PASSED [ 87%]\ntest/test_utils.py::TestUtil::test_intlist_to_bytes PASSED [ 87%]\ntest/test_utils.py::TestUtil::test_iri_to_uri PASSED [ 88%]\ntest/test_utils.py::TestUtil::test_is_html PASSED [ 88%]\ntest/test_utils.py::TestUtil::test_js_to_json_common_constructors PASSED [ 88%]\ntest/test_utils.py::TestUtil::test_js_to_json_edgecases PASSED [ 88%]\ntest/test_utils.py::TestUtil::test_js_to_json_malformed PASSED [ 88%]\ntest/test_utils.py::TestUtil::test_js_to_json_realworld PASSED [ 88%]\ntest/test_utils.py::TestUtil::test_js_to_json_template_literal PASSED [ 89%]\ntest/test_utils.py::TestUtil::test_js_to_json_vars_strings PASSED [ 89%]\ntest/test_utils.py::TestUtil::test_limit_length PASSED [ 89%]\ntest/test_utils.py::TestUtil::test_locked_file PASSED [ 
89%]\ntest/test_utils.py::TestUtil::test_lowercase_escape PASSED [ 89%]\ntest/test_utils.py::TestUtil::test_match_str PASSED [ 89%]\ntest/test_utils.py::TestUtil::test_merge_dicts PASSED [ 90%]\ntest/test_utils.py::TestUtil::test_mimetype2ext PASSED [ 90%]\ntest/test_utils.py::TestUtil::test_month_by_name PASSED [ 90%]\ntest/test_utils.py::TestUtil::test_multipart_encode PASSED [ 90%]\ntest/test_utils.py::TestUtil::test_normalize_url PASSED [ 90%]\ntest/test_utils.py::TestUtil::test_ohdave_rsa_encrypt PASSED [ 90%]\ntest/test_utils.py::TestUtil::test_ordered_set PASSED [ 91%]\ntest/test_utils.py::TestUtil::test_paged_list PASSED [ 91%]\ntest/test_utils.py::TestUtil::test_parse_age_limit PASSED [ 91%]\ntest/test_utils.py::TestUtil::test_parse_bitrate PASSED [ 91%]\ntest/test_utils.py::TestUtil::test_parse_codecs PASSED [ 91%]\ntest/test_utils.py::TestUtil::test_parse_count PASSED [ 91%]\ntest/test_utils.py::TestUtil::test_parse_dfxp_time_expr PASSED [ 92%]\ntest/test_utils.py::TestUtil::test_parse_duration PASSED [ 92%]\ntest/test_utils.py::TestUtil::test_parse_filesize PASSED [ 92%]\ntest/test_utils.py::TestUtil::test_parse_iso8601 PASSED [ 92%]\ntest/test_utils.py::TestUtil::test_parse_resolution PASSED [ 92%]\ntest/test_utils.py::TestUtil::test_pkcs1pad PASSED [ 92%]\ntest/test_utils.py::TestUtil::test_prepend_extension PASSED [ 93%]\ntest/test_utils.py::TestUtil::test_read_batch_urls PASSED [ 93%]\ntest/test_utils.py::TestUtil::test_remove_dot_segments PASSED [ 93%]\ntest/test_utils.py::TestUtil::test_remove_end PASSED [ 93%]\ntest/test_utils.py::TestUtil::test_remove_quotes PASSED [ 93%]\ntest/test_utils.py::TestUtil::test_remove_start PASSED [ 93%]\ntest/test_utils.py::TestUtil::test_render_table PASSED [ 93%]\ntest/test_utils.py::TestUtil::test_replace_extension PASSED [ 94%]\ntest/test_utils.py::TestUtil::test_rot47 PASSED [ 94%]\ntest/test_utils.py::TestUtil::test_sanitize_filename PASSED [ 94%]\ntest/test_utils.py::TestUtil::test_sanitize_filename_restricted PASSED [ 94%]\ntest/test_utils.py::TestUtil::test_sanitize_ids PASSED [ 94%]\ntest/test_utils.py::TestUtil::test_sanitize_path PASSED [ 94%]\ntest/test_utils.py::TestUtil::test_sanitize_url PASSED [ 95%]\ntest/test_utils.py::TestUtil::test_shell_quote PASSED [ 95%]\ntest/test_utils.py::TestUtil::test_smuggle_url PASSED [ 95%]\ntest/test_utils.py::TestUtil::test_str_to_int PASSED [ 95%]\ntest/test_utils.py::TestUtil::test_strip_jsonp PASSED [ 95%]\ntest/test_utils.py::TestUtil::test_strip_or_none PASSED [ 95%]\ntest/test_utils.py::TestUtil::test_subtitles_filename PASSED [ 96%]\ntest/test_utils.py::TestUtil::test_timeconvert PASSED [ 96%]\ntest/test_utils.py::TestUtil::test_traverse_obj PASSED [ 96%]\ntest/test_utils.py::TestUtil::test_try_call PASSED [ 96%]\ntest/test_utils.py::TestUtil::test_unescape_html PASSED [ 96%]\ntest/test_utils.py::TestUtil::test_unified_dates PASSED [ 96%]\ntest/test_utils.py::TestUtil::test_unified_timestamps PASSED [ 97%]\ntest/test_utils.py::TestUtil::test_update_url_query PASSED [ 97%]\ntest/test_utils.py::TestUtil::test_uppercase_escape PASSED [ 97%]\ntest/test_utils.py::TestUtil::test_url_basename PASSED [ 97%]\ntest/test_utils.py::TestUtil::test_url_or_none PASSED [ 97%]\ntest/test_utils.py::TestUtil::test_urlencode_postdata PASSED [ 97%]\ntest/test_utils.py::TestUtil::test_urljoin PASSED [ 98%]\ntest/test_utils.py::TestUtil::test_urshift PASSED [ 98%]\ntest/test_utils.py::TestUtil::test_variadic PASSED [ 98%]\ntest/test_utils.py::TestUtil::test_version_tuple PASSED [ 
98%]\ntest/test_utils.py::TestUtil::test_xpath_attr PASSED [ 98%]\ntest/test_utils.py::TestUtil::test_xpath_element PASSED [ 98%]\ntest/test_utils.py::TestUtil::test_xpath_text PASSED [ 99%]\ntest/test_utils.py::TestUtil::test_xpath_with_ns PASSED [ 99%]\ntest/test_verbose_output.py::TestVerboseOutput::test_private_info_arg PASSED [ 99%]\ntest/test_verbose_output.py::TestVerboseOutput::test_private_info_eq PASSED [ 99%]\ntest/test_verbose_output.py::TestVerboseOutput::test_private_info_shortarg PASSED [ 99%]\ntest/test_verbose_output.py::TestVerboseOutput::test_private_info_shortarg_eq PASSED [ 99%]\ntest/test_youtube_misc.py::TestYoutubeMisc::test_youtube_extract PASSED [100%]\n\n=================================== FAILURES ===================================\n___________________________ TestHttpFD.test_chunked ____________________________\n\nself = <test.test_downloader_http.TestHttpFD testMethod=test_chunked>\n\n def test_chunked(self):\n> self.download_all({\n 'http_chunk_size': 1000,\n })\n\ntest/test_downloader_http.py:100: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \ntest/test_downloader_http.py:94: in download_all\n self.download(params, ep)\ntest/test_downloader_http.py:86: in download\n self.assertTrue(downloader.real_download(filename, {\nyt_dlp/downloader/http.py:365: in real_download\n return download()\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\n def download():\n data_len = ctx.data.headers.get('Content-length')\n \n if ctx.data.headers.get('Content-encoding'):\n # Content-encoding is present, Content-length is not reliable anymore as we are\n # doing auto decompression. (See: https://github.com/yt-dlp/yt-dlp/pull/6176)\n data_len = None\n \n # Range HTTP header may be ignored/unsupported by a webserver\n # (e.g. extractor/scivee.py, extractor/bambuser.py).\n # However, for a test we still would like to download just a piece of a file.\n # To achieve this we limit data_len to _TEST_FILE_SIZE and manually control\n # block size when downloading a file.\n if is_test and (data_len is None or int(data_len) > self._TEST_FILE_SIZE):\n data_len = self._TEST_FILE_SIZE\n \n if data_len is not None:\n data_len = int(data_len) + ctx.resume_len\n min_data_len = self.params.get('min_filesize')\n max_data_len = self.params.get('max_filesize')\n if min_data_len is not None and data_len < min_data_len:\n self.to_screen(\n f'\\r[download] File is smaller than min-filesize ({data_len} bytes < {min_data_len} bytes). Aborting.')\n return False\n if max_data_len is not None and data_len > max_data_len:\n self.to_screen(\n f'\\r[download] File is larger than max-filesize ({data_len} bytes > {max_data_len} bytes). 
Aborting.')\n return False\n \n byte_counter = 0 + ctx.resume_len\n block_size = ctx.block_size\n start = time.time()\n \n # measure time over whole while-loop, so slow_down() and best_block_size() work together properly\n now = None # needed for slow_down() in the first loop run\n before = start # start measuring\n \n def retry(e):\n close_stream()\n ctx.resume_len = (byte_counter if ctx.tmpfilename == '-'\n else os.path.getsize(encodeFilename(ctx.tmpfilename)))\n raise RetryDownload(e)\n \n while True:\n try:\n # Download and write\n data_block = ctx.data.read(block_size if not is_test else min(block_size, data_len - byte_counter))\n except TransportError as err:\n retry(err)\n \n byte_counter += len(data_block)\n \n # exit loop when download is finished\n if len(data_block) == 0:\n break\n \n # Open destination file just in time\n if ctx.stream is None:\n try:\n ctx.stream, ctx.tmpfilename = self.sanitize_open(\n ctx.tmpfilename, ctx.open_mode)\n assert ctx.stream is not None\n ctx.filename = self.undo_temp_name(ctx.tmpfilename)\n self.report_destination(ctx.filename)\n except OSError as err:\n self.report_error('unable to open for writing: %s' % str(err))\n return False\n \n if self.params.get('xattr_set_filesize', False) and data_len is not None:\n try:\n write_xattr(ctx.tmpfilename, 'user.ytdl.filesize', str(data_len).encode())\n except (XAttrUnavailableError, XAttrMetadataError) as err:\n self.report_error('unable to set filesize xattr: %s' % str(err))\n \n try:\n ctx.stream.write(data_block)\n except OSError as err:\n self.to_stderr('\\n')\n self.report_error('unable to write data: %s' % str(err))\n return False\n \n # Apply rate limit\n self.slow_down(start, now, byte_counter - ctx.resume_len)\n \n # end measuring of one loop run\n now = time.time()\n after = now\n \n # Adjust block size\n if not self.params.get('noresizebuffer', False):\n block_size = self.best_block_size(after - before, len(data_block))\n \n before = after\n \n # Progress message\n> speed = self.calc_speed(start, now, byte_counter - ctx.resume_len)\nE AttributeError: 'HttpFD' object has no attribute 'calc_speed'\n\nyt_dlp/downloader/http.py:296: AttributeError\n___________________________ TestHttpFD.test_regular ____________________________\n\nself = <test.test_downloader_http.TestHttpFD testMethod=test_regular>\n\n def test_regular(self):\n> self.download_all({})\n\ntest/test_downloader_http.py:97: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \ntest/test_downloader_http.py:94: in download_all\n self.download(params, ep)\ntest/test_downloader_http.py:86: in download\n self.assertTrue(downloader.real_download(filename, {\nyt_dlp/downloader/http.py:365: in real_download\n return download()\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\n def download():\n data_len = ctx.data.headers.get('Content-length')\n \n if ctx.data.headers.get('Content-encoding'):\n # Content-encoding is present, Content-length is not reliable anymore as we are\n # doing auto decompression. (See: https://github.com/yt-dlp/yt-dlp/pull/6176)\n data_len = None\n \n # Range HTTP header may be ignored/unsupported by a webserver\n # (e.g. 
extractor/scivee.py, extractor/bambuser.py).\n # However, for a test we still would like to download just a piece of a file.\n # To achieve this we limit data_len to _TEST_FILE_SIZE and manually control\n # block size when downloading a file.\n if is_test and (data_len is None or int(data_len) > self._TEST_FILE_SIZE):\n data_len = self._TEST_FILE_SIZE\n \n if data_len is not None:\n data_len = int(data_len) + ctx.resume_len\n min_data_len = self.params.get('min_filesize')\n max_data_len = self.params.get('max_filesize')\n if min_data_len is not None and data_len < min_data_len:\n self.to_screen(\n f'\\r[download] File is smaller than min-filesize ({data_len} bytes < {min_data_len} bytes). Aborting.')\n return False\n if max_data_len is not None and data_len > max_data_len:\n self.to_screen(\n f'\\r[download] File is larger than max-filesize ({data_len} bytes > {max_data_len} bytes). Aborting.')\n return False\n \n byte_counter = 0 + ctx.resume_len\n block_size = ctx.block_size\n start = time.time()\n \n # measure time over whole while-loop, so slow_down() and best_block_size() work together properly\n now = None # needed for slow_down() in the first loop run\n before = start # start measuring\n \n def retry(e):\n close_stream()\n ctx.resume_len = (byte_counter if ctx.tmpfilename == '-'\n else os.path.getsize(encodeFilename(ctx.tmpfilename)))\n raise RetryDownload(e)\n \n while True:\n try:\n # Download and write\n data_block = ctx.data.read(block_size if not is_test else min(block_size, data_len - byte_counter))\n except TransportError as err:\n retry(err)\n \n byte_counter += len(data_block)\n \n # exit loop when download is finished\n if len(data_block) == 0:\n break\n \n # Open destination file just in time\n if ctx.stream is None:\n try:\n ctx.stream, ctx.tmpfilename = self.sanitize_open(\n ctx.tmpfilename, ctx.open_mode)\n assert ctx.stream is not None\n ctx.filename = self.undo_temp_name(ctx.tmpfilename)\n self.report_destination(ctx.filename)\n except OSError as err:\n self.report_error('unable to open for writing: %s' % str(err))\n return False\n \n if self.params.get('xattr_set_filesize', False) and data_len is not None:\n try:\n write_xattr(ctx.tmpfilename, 'user.ytdl.filesize', str(data_len).encode())\n except (XAttrUnavailableError, XAttrMetadataError) as err:\n self.report_error('unable to set filesize xattr: %s' % str(err))\n \n try:\n ctx.stream.write(data_block)\n except OSError as err:\n self.to_stderr('\\n')\n self.report_error('unable to write data: %s' % str(err))\n return False\n \n # Apply rate limit\n self.slow_down(start, now, byte_counter - ctx.resume_len)\n \n # end measuring of one loop run\n now = time.time()\n after = now\n \n # Adjust block size\n if not self.params.get('noresizebuffer', False):\n block_size = self.best_block_size(after - before, len(data_block))\n \n before = after\n \n # Progress message\n> speed = self.calc_speed(start, now, byte_counter - ctx.resume_len)\nE AttributeError: 'HttpFD' object has no attribute 'calc_speed'\n\nyt_dlp/downloader/http.py:296: AttributeError\n=========================== short test summary info ============================\nSKIPPED [1] test/test_downloader_external.py:115: ffmpeg not found\nSKIPPED [1] test/test_jsinterp.py:111: Not implemented\nSKIPPED [1] test/test_jsinterp.py:373: Not implemented\nSKIPPED [1] test/test_jsinterp.py:335: Not implemented\nSKIPPED [1] test/test_networking.py:309: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:320: Requests request handler is not 
available\nSKIPPED [1] test/test_networking.py:338: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:350: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:365: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:372: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:382: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:393: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:448: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:480: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:486: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:492: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:509: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:525: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:541: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:549: Requests request handler is not available\nSKIPPED [2] test/test_networking.py:555: brotli support is not installed\nSKIPPED [1] test/test_networking.py:566: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:576: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:586: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:597: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:607: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:637: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:663: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:673: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:681: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:719: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:725: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:732: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:739: Requests request handler is not available\nSKIPPED [11] test/test_networking.py:824: Requests request handler is not available\nSKIPPED [8] test/test_networking.py:855: Requests request handler is not available\nSKIPPED [2] test/test_networking.py:996: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:1005: Requests request handler is not available\nSKIPPED [2] test/test_networking.py:1010: Requests request handler is not available\nSKIPPED [6] test/test_networking.py:1019: Requests request handler is not available\nSKIPPED [1] test/test_networking.py:1028: Requests request handler is not available\nSKIPPED [4] test/test_networking.py:1033: Requests request handler is not available\nSKIPPED [5] test/test_networking.py:1038: Requests request handler is not available\nSKIPPED [1] test/test_networking_utils.py:96: certifi is not installed\nSKIPPED [1] test/test_socks.py:266: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:274: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:284: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:292: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:301: Requests 
request handler is not available\nSKIPPED [3] test/test_socks.py:311: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:323: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:332: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:342: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:350: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:363: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:371: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:379: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:388: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:397: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:405: Requests request handler is not available\nSKIPPED [1] test/test_socks.py:416: Requests request handler is not available\nSKIPPED [8] test/test_socks.py:425: Requests request handler is not available\nSKIPPED [1] test/test_utils.py:2398: Only relevant on Windows\nFAILED test/test_downloader_http.py::TestHttpFD::test_chunked - AttributeError: 'HttpFD' object has no attribute 'calc_speed'\nFAILED test/test_downloader_http.py::TestHttpFD::test_regular - AttributeError: 'HttpFD' object has no attribute 'calc_speed'\n=== 2 failed, 509 passed, 104 skipped, 5705 deselected in 120.03s (0:02:00) ====\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/yt_dlp/downloader/http.py b/yt_dlp/downloader/http.py\nindex f5237443e..a1f95db29 100644\n--- a/yt_dlp/downloader/http.py\n+++ b/yt_dlp/downloader/http.py\n@@ -11,6 +11,7 @@\n )\n from ..utils import (\n ContentTooShortError,\n+ FormatProgressInfos,\n RetryManager,\n ThrottledDownload,\n XAttrMetadataError,\n@@ -293,11 +294,11 @@ def retry(e):\n before = after\n \n # Progress message\n- speed = self.calc_speed(start, now, byte_counter - ctx.resume_len)\n+ speed = FormatProgressInfos.calc_speed(start, now, byte_counter - ctx.resume_len)\n if ctx.data_len is None:\n eta = None\n else:\n- eta = self.calc_eta(start, time.time(), ctx.data_len - ctx.resume_len, byte_counter - ctx.resume_len)\n+ eta = FormatProgressInfos.calc_eta(start, time.time(), ctx.data_len - ctx.resume_len, byte_counter - ctx.resume_len)\n \n self._hook_progress({\n 'status': 'downloading',\ndiff --git a/yt_dlp/downloader/rtmp.py b/yt_dlp/downloader/rtmp.py\nindex 0e0952599..9373f3585 100644\n--- a/yt_dlp/downloader/rtmp.py\n+++ b/yt_dlp/downloader/rtmp.py\n@@ -5,6 +5,7 @@\n \n from .common import FileDownloader\n from ..utils import (\n+ FormatProgressInfos,\n Popen,\n check_executable,\n encodeArgument,\n@@ -50,8 +51,8 @@ def run_rtmpdump(args):\n resume_percent = percent\n resume_downloaded_data_len = downloaded_data_len\n time_now = time.time()\n- eta = self.calc_eta(start, time_now, 100 - resume_percent, percent - resume_percent)\n- speed = self.calc_speed(start, time_now, downloaded_data_len - resume_downloaded_data_len)\n+ eta = FormatProgressInfos.calc_eta(start, time_now, 100 - resume_percent, percent - resume_percent)\n+ speed = FormatProgressInfos.calc_speed(start, time_now, downloaded_data_len - resume_downloaded_data_len)\n data_len = None\n if percent > 0:\n data_len = int(downloaded_data_len * 100 / percent)\n", "difficulty": 2, "changed_files": ["yt_dlp/downloader/http.py", "yt_dlp/downloader/rtmp.py"], "commit_link": "https://github.com/yt-dlp/yt-dlp/tree/43dd59c3137df77f5dd22cef4cb7bedfe9f6b12e"}
data/python/4410203.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 53, "repo_owner": "mosaicml", "repo_name": "composer", "head_branch": "error_logging_callback", "workflow_name": "Smoketest", "workflow_filename": "smoketest.yaml", "workflow_path": ".github/workflows/smoketest.yaml", "contributor": "bmosaicml", "sha_fail": "4410203c56984c613d23f29a81ecd1b96c57b1ee", "sha_success": "e4eb7eeac2e76c3287bdd650407e4067ad3a7625", "workflow": "name: Smoketest\non:\n push:\n branches:\n - dev\n - main\n - release/**\n pull_request:\n workflow_call:\n workflow_dispatch:\n# Cancel old runs when a new commit is pushed to the same branch if not on main or dev\nconcurrency:\n group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}\n cancel-in-progress: ${{ github.ref != 'refs/heads/main' && github.ref != 'refs/heads/dev' }}\ndefaults:\n run:\n working-directory: .\njobs:\n smoketest:\n runs-on: ubuntu-20.04\n timeout-minutes: 10\n strategy:\n matrix:\n python_version:\n - \"3.8\"\n - \"3.9\"\n - \"3.10\"\n steps:\n - uses: actions/checkout@v3\n - uses: actions/setup-python@v4\n with:\n python-version: ${{ matrix.python_version }}\n - name: Setup\n run: |\n set -ex\n python -m pip install --upgrade 'pip<23' wheel\n python -m pip install --upgrade .\n python -m pip install pytest==7.2.1 pytest_codeblocks==0.16.1\n - name: Run checks\n run: |\n pytest tests/test_smoketest.py\n", "logs": [{"step_name": "smoketest (3.8)/5_Run checks.txt", "log": "##[group]Run pytest tests/test_smoketest.py\n\u001b[36;1mpytest tests/test_smoketest.py\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\nImportError while loading conftest '/home/runner/work/composer/composer/tests/conftest.py'.\ntests/conftest.py:9: in <module>\n from composer.utils import reproducibility\ncomposer/__init__.py:10: in <module>\n from composer.trainer import Trainer\ncomposer/trainer/__init__.py:6: in <module>\n from composer.trainer.trainer import Trainer\ncomposer/trainer/trainer.py:37: in <module>\n from composer.callbacks import CheckpointSaver, OptimizerMonitor\ncomposer/callbacks/__init__.py:12: in <module>\n from composer.callbacks.eval_output_logging_callback import EvalOutputLogging\ncomposer/callbacks/eval_output_logging_callback.py:16: in <module>\n from composer.datasets.in_context_learning_evaluation import (InContextLearningCodeEvalDataset,\ncomposer/datasets/in_context_learning_evaluation.py:13: in <module>\n import transformers\nE ModuleNotFoundError: No module named 'transformers'\n##[error]Process completed with exit code 4.\n"}, {"step_name": "smoketest (3.9)/5_Run checks.txt", "log": "##[group]Run pytest tests/test_smoketest.py\n\u001b[36;1mpytest tests/test_smoketest.py\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\nImportError while loading conftest 
'/home/runner/work/composer/composer/tests/conftest.py'.\ntests/conftest.py:9: in <module>\n from composer.utils import reproducibility\ncomposer/__init__.py:10: in <module>\n from composer.trainer import Trainer\ncomposer/trainer/__init__.py:6: in <module>\n from composer.trainer.trainer import Trainer\ncomposer/trainer/trainer.py:37: in <module>\n from composer.callbacks import CheckpointSaver, OptimizerMonitor\ncomposer/callbacks/__init__.py:12: in <module>\n from composer.callbacks.eval_output_logging_callback import EvalOutputLogging\ncomposer/callbacks/eval_output_logging_callback.py:16: in <module>\n from composer.datasets.in_context_learning_evaluation import (InContextLearningCodeEvalDataset,\ncomposer/datasets/in_context_learning_evaluation.py:13: in <module>\n import transformers\nE ModuleNotFoundError: No module named 'transformers'\n##[error]Process completed with exit code 4.\n"}, {"step_name": "smoketest (3.10)/5_Run checks.txt", "log": "##[group]Run pytest tests/test_smoketest.py\n\u001b[36;1mpytest tests/test_smoketest.py\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.10.13/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib\n##[endgroup]\nImportError while loading conftest '/home/runner/work/composer/composer/tests/conftest.py'.\ntests/conftest.py:9: in <module>\n from composer.utils import reproducibility\ncomposer/__init__.py:10: in <module>\n from composer.trainer import Trainer\ncomposer/trainer/__init__.py:6: in <module>\n from composer.trainer.trainer import Trainer\ncomposer/trainer/trainer.py:37: in <module>\n from composer.callbacks import CheckpointSaver, OptimizerMonitor\ncomposer/callbacks/__init__.py:12: in <module>\n from composer.callbacks.eval_output_logging_callback import EvalOutputLogging\ncomposer/callbacks/eval_output_logging_callback.py:16: in <module>\n from composer.datasets.in_context_learning_evaluation import (InContextLearningCodeEvalDataset,\ncomposer/datasets/in_context_learning_evaluation.py:13: in <module>\n import transformers\nE ModuleNotFoundError: No module named 'transformers'\n##[error]Process completed with exit code 4.\n"}], "diff": "diff --git a/composer/callbacks/eval_output_logging_callback.py b/composer/callbacks/eval_output_logging_callback.py\nindex 48e4f76d..65cbdebb 100644\n--- a/composer/callbacks/eval_output_logging_callback.py\n+++ b/composer/callbacks/eval_output_logging_callback.py\n@@ -7,7 +7,7 @@ import hashlib\n import os\n import random\n import shutil\n-import time\n+import tempfile\n from typing import Callable, Optional\n \n from torch.utils.data import DataLoader\n@@ -58,6 +58,8 @@ class EvalOutputLogging(Callback):\n self.hash = hashlib.sha256()\n self.destination_file = None\n \n+ # with tempfile.NamedTemporaryFile\n+ # tmp_dir =\n def _write_tables_to_output_dir(self, state: State):\n try:\n import pandas as pd\n@@ -66,16 +68,9 @@ class EvalOutputLogging(Callback):\n conda_package='pandas',\n conda_channel='conda-forge') from e\n # write tmp files\n- self.hash.update((str(time.time()) + str(random.randint(0, 1_000_000))).encode('utf-8'))\n- tmp_dir = os.getcwd() + '/' + self.hash.hexdigest()\n-\n- if not os.path.exists(tmp_dir):\n- with dist.local_rank_zero_download_and_wait(tmp_dir):\n- if 
dist.get_local_rank() == 0:\n- os.mkdir(tmp_dir)\n \n full_df = pd.DataFrame()\n- file_name = f'eval-outputs-ba{state.timestamp.batch.value}.tsv'\n+ upload_file_name = f'eval-outputs-ba{state.timestamp.batch.value}.tsv'\n \n for benchmark in self.table:\n cols, rows = self.table[benchmark]\n@@ -84,18 +79,14 @@ class EvalOutputLogging(Callback):\n df['benchmark'] = benchmark\n full_df = pd.concat([full_df, df], ignore_index=True)\n \n- with dist.local_rank_zero_download_and_wait(f'{tmp_dir}/{file_name}'):\n- if dist.get_local_rank() == 0:\n- with open(f'{tmp_dir}/{file_name}', 'wb') as f:\n- full_df.to_csv(f, sep='\\t', index=False)\n+ tmp_file = ''\n+ with tempfile.NamedTemporaryFile('wb') as f:\n+ full_df.to_csv(f, sep='\\t', index=False)\n+ tmp_file = f.name\n \n # copy/upload tmp files\n- _write(destination_path=f'{self.output_directory}/{file_name}', src_file=f'{tmp_dir}/{file_name}')\n- os.remove(f'{tmp_dir}/{file_name}')\n- self.destination_file = f'{self.output_directory}/{file_name}'\n-\n- # delete tmp files\n- os.rmdir(tmp_dir)\n+ _write(destination_path=f'{self.output_directory}/{upload_file_name}', src_file=tmp_file)\n+ self.destination_file = f'{self.output_directory}/{upload_file_name}'\n \n def _prep_response_cache(self, state, cache):\n benchmark = state.dataloader_label\ndiff --git a/composer/datasets/in_context_learning_evaluation.py b/composer/datasets/in_context_learning_evaluation.py\nindex e1566509..1a53f75c 100644\n--- a/composer/datasets/in_context_learning_evaluation.py\n+++ b/composer/datasets/in_context_learning_evaluation.py\n@@ -10,7 +10,6 @@ import random\n from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union\n \n import torch\n-import transformers\n from torch.utils.data import DataLoader, Dataset\n from tqdm import tqdm\n \n", "difficulty": 1, "changed_files": ["composer/callbacks/eval_output_logging_callback.py", "composer/datasets/in_context_learning_evaluation.py"], "commit_link": "https://github.com/mosaicml/composer/tree/4410203c56984c613d23f29a81ecd1b96c57b1ee"}
data/python/44b56e0.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 1, "repo_owner": "huggingface", "repo_name": "accelerate", "head_branch": "main", "workflow_name": "Quality Check", "workflow_filename": "quality.yml", "workflow_path": ".github/workflows/quality.yml", "contributor": "PhilJd", "sha_fail": "44b56e01683771fb4ca583f9ea57c67dcee8e779", "sha_success": "90bb882770fafc8db2fabc7fe58f446e12b55d77", "workflow": "name: Quality Check\n\non: [pull_request]\n\njobs:\n quality:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/[email protected]\n - name: Set up Python 3.8\n uses: actions/setup-python@v3\n with:\n python-version: 3.8\n - name: Install Python dependencies\n run: pip install -e .[quality]\n - name: Run Quality check\n run: make quality\n - name: Check if failure\n if: ${{ failure() }}\n run: |\n echo \"Quality check failed. Please ensure the right dependency versions are installed with 'pip install -e .[quality]' and rerun 'make style; make quality;'\" >> $GITHUB_STEP_SUMMARY\n\n", "logs": [{"step_name": "quality/5_Run Quality check.txt", "log": "##[group]Run make quality\n\u001b[36;1mmake quality\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\nblack --required-version 23 --check tests src examples benchmarks utils\nAll done! \u2728 \ud83c\udf70 \u2728\n118 files would be left unchanged.\nruff tests src examples benchmarks utils\ndoc-builder style src/accelerate docs/source --max_len 119 --check_only\nTraceback (most recent call last):\n File \"/opt/hostedtoolcache/Python/3.8.18/x64/bin/doc-builder\", line 8, in <module>\n sys.exit(main())\n File \"/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/doc_builder/commands/doc_builder_cli.py\", line 47, in main\n args.func(args)\n File \"/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/doc_builder/commands/style.py\", line 28, in style_command\n raise ValueError(f\"{len(changed)} files should be restyled!\")\nValueError: 1 files should be restyled!\nmake: *** [Makefile:17: quality] Error 1\n##[error]Process completed with exit code 2.\n"}], "diff": "diff --git a/src/accelerate/big_modeling.py b/src/accelerate/big_modeling.py\nindex 5fd2bd5..6a2392a 100644\n--- a/src/accelerate/big_modeling.py\n+++ b/src/accelerate/big_modeling.py\n@@ -73,7 +73,9 @@ def init_empty_weights(include_buffers: bool = None):\n \n Any model created under this context manager has no weights. As such you can't do something like\n `model.to(some_device)` with it. To load weights inside your empty model, see [`load_checkpoint_and_dispatch`].\n- Make sure to overwrite the default device_map param, otherwise dispatch is not called.\n+ Make sure to overwrite the default device_map param for [`load_checkpoint_and_dispatch`], otherwise dispatch is not\n+ called.\n+\n </Tip>\n \"\"\"\n if include_buffers is None:\n@@ -480,7 +482,7 @@ def load_checkpoint_and_dispatch(\n \n To have Accelerate compute the most optimized `device_map` automatically, set `device_map=\"auto\"`. For more\n information about each option see [here](../concept_guides/big_model_inference#designing-a-device-map).\n- Defaults to None, which means `dispatch_model` will not be called.\n+ Defaults to None, which means [`dispatch_model`] will not be called.\n max_memory (`Dict`, *optional*):\n A dictionary device identifier to maximum memory. 
Will default to the maximum memory available for each GPU\n and the available CPU RAM if unset.\n", "difficulty": 0, "changed_files": ["src/accelerate/big_modeling.py"], "commit_link": "https://github.com/huggingface/accelerate/tree/44b56e01683771fb4ca583f9ea57c67dcee8e779"}
data/python/4d5898b.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 14, "repo_owner": "canonical", "repo_name": "cloud-init", "head_branch": "holman/dhcpcd", "workflow_name": "Lint Tests", "workflow_filename": "check_format.yml", "workflow_path": ".github/workflows/check_format.yml", "contributor": "holmanb", "sha_fail": "4d5898b8a73c93e1ed4434744c2fa7c3f7fbd501", "sha_success": "2bd296ef8983a716a7b8d107571181333f093109", "workflow": "name: Lint Tests\non:\n pull_request:\n push:\n branches:\n - main\n\nconcurrency:\n group: 'ci-${{ github.workflow }}-${{ github.ref }}'\n cancel-in-progress: true\ndefaults:\n run:\n shell: sh -ex {0}\n\njobs:\n check_format:\n strategy:\n fail-fast: false\n matrix:\n env: [ruff, mypy, pylint, black, isort]\n lint-with:\n - {tip-versions: false, os: ubuntu-20.04}\n - {tip-versions: true, os: ubuntu-latest}\n name: Check ${{ matrix.lint-with.tip-versions && 'tip-' || '' }}${{ matrix.env }}\n runs-on: ${{ matrix.lint-with.os }}\n steps:\n - name: \"Checkout #1\"\n uses: actions/[email protected]\n\n - name: \"Checkout #2 (for tools/read-version)\"\n run: |\n git fetch --unshallow\n git remote add upstream https://git.launchpad.net/cloud-init\n\n - name: Dependencies\n run: |\n sudo DEBIAN_FRONTEND=noninteractive apt-get -qy update\n sudo DEBIAN_FRONTEND=noninteractive apt-get -qy install tox\n\n - name: Print version\n run: python3 --version\n\n - name: Test\n if: ${{ !matrix.lint-with.tip-versions }}\n env:\n # matrix env: not to be confused w/environment variables or testenv\n TOXENV: ${{ matrix.env }}\n run: tox\n - name: Test (tip versions)\n if: matrix.lint-with.tip-versions\n continue-on-error: true\n env:\n TOXENV: tip-${{ matrix.env }}\n run: tox\n schema-format:\n strategy:\n fail-fast: false\n name: Check json format\n runs-on: ubuntu-22.04\n steps:\n - name: \"Checkout #1\"\n uses: actions/[email protected]\n\n - name: \"Test format\"\n run: |\n tools/check_json_format.sh cloudinit/config/schemas/schema-cloud-config-v1.json\n tools/check_json_format.sh cloudinit/config/schemas/schema-network-config-v1.json\n tools/check_json_format.sh cloudinit/config/schemas/versions.schema.cloud-config.json\n\n doc:\n strategy:\n fail-fast: false\n name: Check docs\n runs-on: ubuntu-22.04\n steps:\n - name: \"Checkout #1\"\n uses: actions/[email protected]\n\n - name: \"Checkout #2 (for tools/read-version)\"\n run: |\n git fetch --unshallow\n git remote add upstream https://git.launchpad.net/cloud-init\n - name: \"Install Python 3.10\"\n uses: actions/setup-python@v4\n with:\n python-version: '3.10.8'\n - name: \"Install dependencies\"\n run: |\n sudo DEBIAN_FRONTEND=noninteractive apt-get -qy update\n sudo DEBIAN_FRONTEND=noninteractive apt-get -qy install tox\n - name: \"Spellcheck\"\n run: |\n tox\n env:\n TOXENV: doc-spelling\n - name: \"Build docs\"\n env:\n TOXENV: doc\n run: |\n tox\n\n shell-lint:\n name: Shell Lint\n runs-on: ubuntu-22.04\n steps:\n - name: Checkout\n uses: actions/[email protected]\n\n - name: Install ShellCheck\n run: |\n sudo apt-get update\n sudo apt install shellcheck\n\n - name: Run ShellCheck\n run: |\n shellcheck ./tools/ds-identify\n\n check-cla-signers:\n runs-on: ubuntu-22.04\n steps:\n - uses: actions/[email protected]\n\n - name: Check CLA signers file\n run: tools/check-cla-signers\n", "logs": [{"step_name": "Check pylint/6_Test.txt", "log": "##[group]Run tox\n\u001b[36;1mtox\u001b[0m\nshell: /usr/bin/sh -ex {0}\nenv:\n TOXENV: pylint\n##[endgroup]\n+ tox\nGLOB sdist-make: /home/runner/work/cloud-init/cloud-init/setup.py\npylint create: 
/home/runner/work/cloud-init/cloud-init/.tox/pylint\npylint installdeps: pylint===2.13.9, -r/home/runner/work/cloud-init/cloud-init/test-requirements.txt, -r/home/runner/work/cloud-init/cloud-init/integration-requirements.txt\npylint inst: /home/runner/work/cloud-init/cloud-init/.tox/.tmp/package/1/cloud-init-23.4+88.g0e8fe220.zip\npylint installed: adal==1.2.7,anyio==4.2.0,appdirs==1.4.4,applicationinsights==0.11.10,argcomplete==3.1.6,astroid==2.11.7,attrs==23.2.0,autopage==0.5.2,azure-cli-core==2.56.0,azure-cli-telemetry==1.1.0,azure-common==1.1.28,azure-core==1.29.6,azure-identity==1.15.0,azure-mgmt-compute==30.4.0,azure-mgmt-core==1.4.0,azure-mgmt-network==25.2.0,azure-mgmt-resource==23.0.1,bcrypt==4.1.2,boto3==1.34.17,botocore==1.34.17,cachetools==5.3.2,certifi==2023.11.17,cffi==1.16.0,charset-normalizer==3.3.2,circuitbreaker==1.4.0,cliff==4.5.0,cloud-init==23.4+88.g0e8fe220,cmd2==2.4.3,configobj==5.0.8,coverage==7.4.0,cryptography==41.0.7,debtcollector==2.5.0,decorator==5.1.1,dill==0.3.7,distro==1.9.0,dogpile.cache==1.3.0,exceptiongroup==1.2.0,google-api-core==2.15.0,google-api-python-client==2.113.0,google-auth==2.26.2,google-auth-httplib2==0.2.0,googleapis-common-protos==1.62.0,httplib2==0.22.0,humanfriendly==10.0,ibm-cloud-sdk-core==3.18.2,ibm-platform-services==0.49.0,ibm-vpc==0.20.0,idna==3.6,importlib-metadata==7.0.1,importlib-resources==6.1.1,iniconfig==2.0.0,iso8601==2.1.0,isodate==0.6.1,isort==5.13.2,Jinja2==3.1.3,jmespath==1.0.1,jsonpatch==1.33,jsonpointer==2.4,jsonschema==4.20.0,jsonschema-specifications==2023.12.1,keystoneauth1==5.5.0,knack==0.11.0,lazy-object-proxy==1.10.0,MarkupSafe==2.1.3,mccabe==0.7.0,msal==1.24.0b2,msal-extensions==1.0.0,msgpack==1.0.7,msrest==0.7.1,msrestazure==0.6.4,netaddr==0.10.1,netifaces==0.11.0,oauthlib==3.2.2,oci==2.118.2,openstacksdk==1.4.0,os-service-types==1.7.0,osc-lib==3.0.0,oslo.config==9.3.0,oslo.i18n==6.2.0,oslo.serialization==5.3.0,oslo.utils==6.3.0,packaging==23.2,paramiko==3.4.0,passlib==1.7.4,pbr==6.0.0,pkginfo==1.9.6,pkgutil-resolve-name==1.3.10,platformdirs==4.1.0,pluggy==1.3.0,portalocker==2.8.2,prettytable==3.9.0,protobuf==3.19.6,psutil==5.9.7,pyasn1==0.5.1,pyasn1-modules==0.3.0,pycloudlib==1!5.12.1,pycparser==2.21,pygments==2.17.2,PyJWT==2.8.0,pylint==2.13.9,PyNaCl==1.5.0,pyOpenSSL==23.3.0,pyparsing==2.4.7,pyperclip==1.8.2,pyserial==3.5,pytest==7.4.4,pytest-cov==4.1.0,pytest-mock==3.12.0,python-cinderclient==9.4.0,python-dateutil==2.8.2,python-keystoneclient==5.3.0,python-novaclient==18.4.0,python-openstackclient==6.4.0,pytz==2023.3.post1,PyYAML==6.0.1,qemu.qmp==0.0.3,referencing==0.32.1,requests==2.31.0,requests-oauthlib==1.3.1,requestsexceptions==1.4.0,responses==0.24.1,rfc3986==2.0.0,rpds-py==0.17.1,rsa==4.9,s3transfer==0.10.0,simplejson==3.19.2,six==1.16.0,sniffio==1.3.0,stevedore==5.1.0,tabulate==0.9.0,toml==0.10.0,tomli==2.0.1,typing-extensions==4.9.0,tzdata==2023.4,uritemplate==4.1.1,urllib3==2.1.0,wcwidth==0.2.13,wrapt==1.16.0,zipp==3.17.0\npylint run-test-pre: PYTHONHASHSEED='1345909868'\npylint run-test: commands[0] | /home/runner/work/cloud-init/cloud-init/.tox/pylint/bin/python -m pylint cloudinit/ tests/ tools/ conftest.py setup.py\n************* Module cloudinit.net.dhcp\ncloudinit/net/dhcp.py:538: [W0235(useless-super-delegation), Dhcpcd.__init__] Useless super delegation in method '__init__'\n\n------------------------------------\nYour code has been rated at 10.00/10\n\nERROR: InvocationError for command /home/runner/work/cloud-init/cloud-init/.tox/pylint/bin/python -m pylint cloudinit/ tests/ tools/ 
conftest.py setup.py (exited with code 4)\n___________________________________ summary ____________________________________\nERROR: pylint: commands failed\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py\nindex 5c891f26e..c02166ad2 100644\n--- a/cloudinit/distros/__init__.py\n+++ b/cloudinit/distros/__init__.py\n@@ -170,6 +170,7 @@ class Distro(persistence.CloudInitPickleMixin, metaclass=abc.ABCMeta):\n self._runner = helpers.Runners(paths)\n self.package_managers: List[PackageManager] = []\n self._dhcp_client = None\n+ self._fallback_interface = None\n \n def _unpickle(self, ci_pkl_version: int) -> None:\n \"\"\"Perform deserialization fixes for Distro.\"\"\"\n@@ -1274,6 +1275,13 @@ class Distro(persistence.CloudInitPickleMixin, metaclass=abc.ABCMeta):\n \"/bin/true\",\n ] + ([\"-cf\", config_file, interface] if config_file else [interface])\n \n+ @property\n+ def fallback_interface(self):\n+ \"\"\"Determine the network interface used during local network config.\"\"\"\n+ if self._fallback_interface is None:\n+ self._fallback_interface = net.find_fallback_nic()\n+ return self._fallback_interface\n+\n \n def _apply_hostname_transformations_to_url(url: str, transformations: list):\n \"\"\"\ndiff --git a/cloudinit/net/dhcp.py b/cloudinit/net/dhcp.py\nindex 9c94b3f92..7b2d41df1 100644\n--- a/cloudinit/net/dhcp.py\n+++ b/cloudinit/net/dhcp.py\n@@ -19,8 +19,6 @@ import configobj\n \n from cloudinit import subp, temp_utils, util\n from cloudinit.net import (\n- find_fallback_nic,\n- get_devicelist,\n get_ib_interface_hwaddr,\n get_interface_mac,\n is_ib_interface,\n@@ -98,17 +96,8 @@ def maybe_perform_dhcp_discovery(distro, nic=None, dhcp_log_func=None):\n from the dhclient discovery if run, otherwise an empty list is\n returned.\n \"\"\"\n- if nic is None:\n- nic = find_fallback_nic()\n- if nic is None:\n- LOG.debug(\"Skip dhcp_discovery: Unable to find fallback nic.\")\n- raise NoDHCPLeaseInterfaceError()\n- elif nic not in get_devicelist():\n- LOG.debug(\n- \"Skip dhcp_discovery: nic %s not found in get_devicelist.\", nic\n- )\n- raise NoDHCPLeaseInterfaceError()\n- return distro.dhcp_client.dhcp_discovery(nic, dhcp_log_func, distro)\n+ interface = nic or distro.fallback_interface\n+ return distro.dhcp_client.dhcp_discovery(interface, dhcp_log_func, distro)\n \n \n def networkd_parse_lease(content):\n@@ -185,7 +174,7 @@ class DhcpClient(abc.ABC):\n def parse_static_routes(routes: str) -> List[Tuple[str, str]]:\n return []\n \n- def get_newest_lease(self, distro) -> Dict[str, Any]:\n+ def get_newest_lease(self, interface: str) -> Dict[str, Any]:\n return {}\n \n \n@@ -218,15 +207,15 @@ class IscDhclient(DhcpClient):\n dhcp_leases.append(dict(lease_options))\n return dhcp_leases\n \n- def get_newest_lease(self, distro) -> Dict[str, Any]:\n+ def get_newest_lease(self, interface: str) -> Dict[str, Any]:\n \"\"\"Get the most recent lease from the ephemeral phase as a dict.\n \n Return a dict of dhcp options. 
The dict contains key value\n pairs from the most recent lease.\n \n- @param distro: a distro object - not used in this class, but required\n- for function signature compatibility with other classes\n- that require a distro object\n+ @param interface: an interface string - not used in this class, but\n+ required for function signature compatibility with other classes\n+ that require a distro object\n @raises: InvalidDHCPLeaseFileError on empty or unparseable leasefile\n content.\n \"\"\"\n@@ -355,7 +344,7 @@ class IscDhclient(DhcpClient):\n )\n if dhcp_log_func is not None:\n dhcp_log_func(out, err)\n- lease = self.get_newest_lease(distro)\n+ lease = self.get_newest_lease(interface)\n if lease:\n return lease\n raise InvalidDHCPLeaseFileError()\n@@ -535,9 +524,6 @@ class IscDhclient(DhcpClient):\n class Dhcpcd(DhcpClient):\n client_name = \"dhcpcd\"\n \n- def __init__(self):\n- super().__init__()\n-\n def dhcp_discovery(\n self,\n interface: str,\n@@ -547,12 +533,12 @@ class Dhcpcd(DhcpClient):\n \"\"\"Run dhcpcd on the interface without scripts/filesystem artifacts.\n \n @param interface: Name of the network interface on which to send a\n- dhcp request\n+ dhcp request\n @param dhcp_log_func: A callable accepting the client output and\n- error streams.\n+ error streams.\n @param distro: a distro object for network interface manipulation\n @return: dict of lease options representing the most recent dhcp lease\n- parsed from the dhclient.lease file\n+ parsed from the dhclient.lease file\n \"\"\"\n LOG.debug(\"Performing a dhcp discovery on %s\", interface)\n \n@@ -580,7 +566,7 @@ class Dhcpcd(DhcpClient):\n )\n if dhcp_log_func is not None:\n dhcp_log_func(out, err)\n- lease = self.get_newest_lease(distro)\n+ lease = self.get_newest_lease(interface)\n if lease:\n return lease\n raise NoDHCPLeaseError(\"No lease found\")\n@@ -644,12 +630,10 @@ class Dhcpcd(DhcpClient):\n # for compatibility we still expect a list of leases\n return lease\n \n- def get_newest_lease(self, distro) -> Dict[str, Any]:\n- \"\"\"Return a lease\n-\n- Return a list of dicts of dhcp options. Each dict contains key value\n- pairs a specific lease in order from oldest to newest.\n+ def get_newest_lease(self, interface: str) -> Dict[str, Any]:\n+ \"\"\"Return a dict of dhcp options.\n \n+ @param interface: which interface to dump the lease from\n @raises: InvalidDHCPLeaseFileError on empty or unparseable leasefile\n content.\n \"\"\"\n@@ -659,11 +643,11 @@ class Dhcpcd(DhcpClient):\n [\n \"dhcpcd\",\n \"--dumplease\",\n- distro.fallback_interface,\n+ interface,\n ],\n rcs=[0, 1],\n ).stdout,\n- distro.fallback_interface,\n+ interface,\n )\n \n except subp.ProcessExecutionError as error:\n@@ -781,17 +765,17 @@ class Udhcpc(DhcpClient):\n if dhcp_log_func is not None:\n dhcp_log_func(out, err)\n \n- return self.get_newest_lease(distro)\n+ return self.get_newest_lease(interface)\n \n- def get_newest_lease(self, distro) -> Dict[str, Any]:\n+ def get_newest_lease(self, interface: str) -> Dict[str, Any]:\n \"\"\"Get the most recent lease from the ephemeral phase as a dict.\n \n Return a dict of dhcp options. 
The dict contains key value\n pairs from the most recent lease.\n \n- @param distro: a distro object - not used in this class, but required\n- for function signature compatibility with other classes\n- that require a distro object\n+ @param interface: an interface name - not used in this class, but\n+ required for function signature compatibility with other classes\n+ that require a distro object\n @raises: InvalidDHCPLeaseFileError on empty or unparseable leasefile\n content.\n \"\"\"\ndiff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py\nindex c207b5ed6..f9650268a 100644\n--- a/cloudinit/sources/__init__.py\n+++ b/cloudinit/sources/__init__.py\n@@ -610,13 +610,11 @@ class DataSource(CloudInitPickleMixin, metaclass=abc.ABCMeta):\n @property\n def fallback_interface(self):\n \"\"\"Determine the network interface used during local network config.\"\"\"\n- if self._fallback_interface is None:\n- self._fallback_interface = net.find_fallback_nic()\n- if self._fallback_interface is None:\n- LOG.warning(\n- \"Did not find a fallback interface on %s.\", self.cloud_name\n- )\n- return self._fallback_interface\n+ if self.distro.fallback_interface is None:\n+ LOG.warning(\n+ \"Did not find a fallback interface on %s.\", self.cloud_name\n+ )\n+ return self.distro.fallback_interface\n \n @property\n def platform_type(self):\ndiff --git a/tests/unittests/net/test_dhcp.py b/tests/unittests/net/test_dhcp.py\nindex ffa3eab17..9af5f93aa 100644\n--- a/tests/unittests/net/test_dhcp.py\n+++ b/tests/unittests/net/test_dhcp.py\n@@ -372,20 +372,7 @@ class TestDHCPDiscoveryClean(CiTestCase):\n with_logs = True\n ib_address_prefix = \"00:00:00:00:00:00:00:00:00:00:00:00\"\n \n- @mock.patch(\"cloudinit.net.dhcp.find_fallback_nic\")\n- def test_no_fallback_nic_found(self, m_fallback_nic):\n- \"\"\"Log and do nothing when nic is absent and no fallback is found.\"\"\"\n- m_fallback_nic.return_value = None # No fallback nic found\n-\n- with pytest.raises(NoDHCPLeaseInterfaceError):\n- maybe_perform_dhcp_discovery(MockDistro())\n-\n- self.assertIn(\n- \"Skip dhcp_discovery: Unable to find fallback nic.\",\n- self.logs.getvalue(),\n- )\n-\n- @mock.patch(\"cloudinit.net.dhcp.find_fallback_nic\", return_value=\"eth9\")\n+ @mock.patch(\"cloudinit.distros.net.find_fallback_nic\", return_value=\"eth9\")\n @mock.patch(\"cloudinit.net.dhcp.os.remove\")\n @mock.patch(\"cloudinit.net.dhcp.subp.subp\")\n @mock.patch(\"cloudinit.net.dhcp.subp.which\")\n@@ -406,7 +393,7 @@ class TestDHCPDiscoveryClean(CiTestCase):\n self.logs.getvalue(),\n )\n \n- @mock.patch(\"cloudinit.net.dhcp.find_fallback_nic\", return_value=\"eth9\")\n+ @mock.patch(\"cloudinit.distros.net.find_fallback_nic\", return_value=\"eth9\")\n @mock.patch(\"cloudinit.net.dhcp.os.remove\")\n @mock.patch(\"cloudinit.net.dhcp.subp.subp\")\n @mock.patch(\"cloudinit.net.dhcp.subp.which\")\n@@ -435,17 +422,6 @@ class TestDHCPDiscoveryClean(CiTestCase):\n self.logs.getvalue(),\n )\n \n- @mock.patch(\"cloudinit.net.dhcp.find_fallback_nic\", return_value=None)\n- def test_provided_nic_does_not_exist(self, m_fallback_nic):\n- \"\"\"When the provided nic doesn't exist, log a message and no-op.\"\"\"\n- with pytest.raises(NoDHCPLeaseInterfaceError):\n- maybe_perform_dhcp_discovery(MockDistro(), \"idontexist\")\n-\n- self.assertIn(\n- \"Skip dhcp_discovery: nic idontexist not found in get_devicelist.\",\n- self.logs.getvalue(),\n- )\n-\n @mock.patch(\"cloudinit.net.dhcp.subp.which\")\n @mock.patch(\"cloudinit.net.dhcp.find_fallback_nic\")\n def 
test_absent_dhclient_command(self, m_fallback, m_which):\ndiff --git a/tests/unittests/sources/test_cloudstack.py b/tests/unittests/sources/test_cloudstack.py\nindex 8ee04b3e1..b33c5e492 100644\n--- a/tests/unittests/sources/test_cloudstack.py\n+++ b/tests/unittests/sources/test_cloudstack.py\n@@ -244,6 +244,13 @@ class TestCloudStackHostname(CiTestCase):\n )\n )\n \n+ self.patches.enter_context(\n+ mock.patch(\n+ \"cloudinit.distros.net.find_fallback_nic\",\n+ return_value=\"eth0\",\n+ )\n+ )\n+\n self.patches.enter_context(\n mock.patch(\n MOD_PATH\n@@ -304,7 +311,7 @@ class TestCloudStackHostname(CiTestCase):\n )\n \n ds = DataSourceCloudStack(\n- {}, MockDistro(), helpers.Paths({\"run_dir\": self.tmp})\n+ {}, ubuntu.Distro(\"\", {}, {}), helpers.Paths({\"run_dir\": self.tmp})\n )\n ds._fallback_interface = \"eth0\"\n with mock.patch(MOD_PATH + \".util.load_file\"):\ndiff --git a/tests/unittests/sources/test_ec2.py b/tests/unittests/sources/test_ec2.py\nindex 5f60ad737..32fed89da 100644\n--- a/tests/unittests/sources/test_ec2.py\n+++ b/tests/unittests/sources/test_ec2.py\n@@ -10,6 +10,7 @@ import requests\n import responses\n \n from cloudinit import helpers\n+from cloudinit.distros import ubuntu\n from cloudinit.sources import DataSourceEc2 as ec2\n from tests.unittests import helpers as test_helpers\n \n@@ -342,9 +343,11 @@ class TestEc2(test_helpers.ResponsesTestCase):\n p.start()\n self.addCleanup(p.stop)\n \n- def _setup_ds(self, sys_cfg, platform_data, md, md_version=None):\n+ def _setup_ds(\n+ self, sys_cfg, platform_data, md, md_version=None, distro=None\n+ ):\n self.uris = []\n- distro = mock.MagicMock()\n+ distro = distro or mock.MagicMock()\n distro.get_tmp_exec_path = self.tmp_dir\n paths = helpers.Paths({\"run_dir\": self.tmp})\n if sys_cfg is None:\n@@ -846,7 +849,7 @@ class TestEc2(test_helpers.ResponsesTestCase):\n \n @mock.patch(\"cloudinit.net.ephemeral.EphemeralIPv6Network\")\n @mock.patch(\"cloudinit.net.ephemeral.EphemeralIPv4Network\")\n- @mock.patch(\"cloudinit.net.find_fallback_nic\")\n+ @mock.patch(\"cloudinit.distros.net.find_fallback_nic\")\n @mock.patch(\"cloudinit.net.ephemeral.maybe_perform_dhcp_discovery\")\n @mock.patch(\"cloudinit.sources.DataSourceEc2.util.is_FreeBSD\")\n def test_ec2_local_performs_dhcp_on_non_bsd(\n@@ -873,6 +876,7 @@ class TestEc2(test_helpers.ResponsesTestCase):\n platform_data=self.valid_platform_data,\n sys_cfg={\"datasource\": {\"Ec2\": {\"strict_id\": False}}},\n md={\"md\": DEFAULT_METADATA},\n+ distro=ubuntu.Distro(\"\", {}, {}),\n )\n \n ret = ds.get_data()\ndiff --git a/tests/unittests/sources/test_init.py b/tests/unittests/sources/test_init.py\nindex 44d63b816..b3764945d 100644\n--- a/tests/unittests/sources/test_init.py\n+++ b/tests/unittests/sources/test_init.py\n@@ -6,6 +6,7 @@ import os\n import stat\n \n from cloudinit import importer, util\n+from cloudinit.distros import ubuntu\n from cloudinit.event import EventScope, EventType\n from cloudinit.helpers import Paths\n from cloudinit.sources import (\n@@ -73,7 +74,7 @@ class TestDataSource(CiTestCase):\n def setUp(self):\n super(TestDataSource, self).setUp()\n self.sys_cfg = {\"datasource\": {\"_undef\": {\"key1\": False}}}\n- self.distro = \"distrotest\" # generally should be a Distro object\n+ self.distro = ubuntu.Distro(\"\", {}, {})\n self.paths = Paths({})\n self.datasource = DataSource(self.sys_cfg, self.distro, self.paths)\n \n@@ -201,7 +202,7 @@ class TestDataSource(CiTestCase):\n for log in expected_logs:\n self.assertIn(log, logs)\n \n- 
@mock.patch(\"cloudinit.sources.net.find_fallback_nic\")\n+ @mock.patch(\"cloudinit.distros.net.find_fallback_nic\")\n def test_fallback_interface_is_discovered(self, m_get_fallback_nic):\n \"\"\"The fallback_interface is discovered via find_fallback_nic.\"\"\"\n m_get_fallback_nic.return_value = \"nic9\"\n@@ -221,7 +222,7 @@ class TestDataSource(CiTestCase):\n @mock.patch(\"cloudinit.sources.net.find_fallback_nic\")\n def test_wb_fallback_interface_is_cached(self, m_get_fallback_nic):\n \"\"\"The fallback_interface is cached and won't be rediscovered.\"\"\"\n- self.datasource._fallback_interface = \"nic10\"\n+ self.datasource.distro._fallback_interface = \"nic10\"\n self.assertEqual(\"nic10\", self.datasource.fallback_interface)\n m_get_fallback_nic.assert_not_called()\n \ndiff --git a/tests/unittests/sources/test_openstack.py b/tests/unittests/sources/test_openstack.py\nindex 127123cff..97cc8c94e 100644\n--- a/tests/unittests/sources/test_openstack.py\n+++ b/tests/unittests/sources/test_openstack.py\n@@ -338,7 +338,7 @@ class TestOpenStackDataSource(test_helpers.ResponsesTestCase):\n ds_os_local = ds.DataSourceOpenStackLocal(\n settings.CFG_BUILTIN, distro, helpers.Paths({\"run_dir\": self.tmp})\n )\n- ds_os_local._fallback_interface = \"eth9\" # Monkey patch for dhcp\n+ distro.fallback_interface = \"eth9\" # Monkey patch for dhcp\n m_dhcp.return_value = {\n \"interface\": \"eth9\",\n \"fixed-address\": \"192.168.2.9\",\n", "difficulty": 0, "changed_files": ["cloudinit/distros/__init__.py", "cloudinit/net/dhcp.py", "cloudinit/sources/__init__.py", "tests/unittests/net/test_dhcp.py", "tests/unittests/sources/test_cloudstack.py", "tests/unittests/sources/test_ec2.py", "tests/unittests/sources/test_init.py", "tests/unittests/sources/test_openstack.py"], "commit_link": "https://github.com/canonical/cloud-init/tree/4d5898b8a73c93e1ed4434744c2fa7c3f7fbd501"}
data/python/55d2e8d.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 67, "repo_owner": "canonical", "repo_name": "cloud-init", "head_branch": "main", "workflow_name": "Lint Tests", "workflow_filename": "check_format.yml", "workflow_path": ".github/workflows/check_format.yml", "contributor": "ani-sinha", "sha_fail": "55d2e8d4abb024997be878797d5625effad65d43", "sha_success": "9b3b3632cb86b74b79ed2b1fb3672a9f50604992", "workflow": "name: Lint Tests\non:\n pull_request:\n push:\n branches:\n - main\n\nconcurrency:\n group: 'ci-${{ github.workflow }}-${{ github.ref }}'\n cancel-in-progress: true\ndefaults:\n run:\n shell: sh -ex {0}\n\njobs:\n check_format:\n strategy:\n fail-fast: false\n matrix:\n env: [ruff, mypy, pylint, black, isort]\n lint-with:\n - {tip-versions: false, os: ubuntu-20.04}\n - {tip-versions: true, os: ubuntu-latest}\n name: Check ${{ matrix.lint-with.tip-versions && 'tip-' || '' }}${{ matrix.env }}\n runs-on: ${{ matrix.lint-with.os }}\n steps:\n - name: \"Checkout #1\"\n uses: actions/[email protected]\n\n - name: \"Checkout #2 (for tools/read-version)\"\n run: |\n git fetch --unshallow\n git remote add upstream https://git.launchpad.net/cloud-init\n\n - name: Dependencies\n run: |\n sudo DEBIAN_FRONTEND=noninteractive apt-get -qy update\n sudo DEBIAN_FRONTEND=noninteractive apt-get -qy install tox\n\n - name: Print version\n run: python3 --version\n\n - name: Test\n if: ${{ !matrix.lint-with.tip-versions }}\n env:\n # matrix env: not to be confused w/environment variables or testenv\n TOXENV: ${{ matrix.env }}\n run: tox\n - name: Test (tip versions)\n if: matrix.lint-with.tip-versions\n continue-on-error: true\n env:\n TOXENV: tip-${{ matrix.env }}\n run: tox\n schema-format:\n strategy:\n fail-fast: false\n name: Check json format\n runs-on: ubuntu-22.04\n steps:\n - name: \"Checkout #1\"\n uses: actions/[email protected]\n\n - name: \"Test format\"\n run: |\n tools/check_json_format.sh cloudinit/config/schemas/schema-cloud-config-v1.json\n tools/check_json_format.sh cloudinit/config/schemas/schema-network-config-v1.json\n tools/check_json_format.sh cloudinit/config/schemas/versions.schema.cloud-config.json\n\n doc:\n strategy:\n fail-fast: false\n name: Check docs\n runs-on: ubuntu-22.04\n steps:\n - name: \"Checkout #1\"\n uses: actions/[email protected]\n\n - name: \"Checkout #2 (for tools/read-version)\"\n run: |\n git fetch --unshallow\n git remote add upstream https://git.launchpad.net/cloud-init\n - name: \"Install Python 3.10\"\n uses: actions/setup-python@v4\n with:\n python-version: '3.10.8'\n - name: \"Install dependencies\"\n run: |\n sudo DEBIAN_FRONTEND=noninteractive apt-get -qy update\n sudo DEBIAN_FRONTEND=noninteractive apt-get -qy install tox lintian\n - name: \"Spellcheck\"\n run: |\n make check_spelling\n - name: \"Build docs\"\n env:\n TOXENV: doc\n run: |\n tox\n\n shell-lint:\n name: Shell Lint\n runs-on: ubuntu-22.04\n steps:\n - name: Checkout\n uses: actions/[email protected]\n\n - name: Install ShellCheck\n run: |\n sudo apt-get update\n sudo apt install shellcheck\n\n - name: Run ShellCheck\n run: |\n shellcheck ./tools/ds-identify\n\n check-cla-signers:\n runs-on: ubuntu-22.04\n steps:\n - uses: actions/[email protected]\n\n - name: Check CLA signers file\n run: tools/check-cla-signers\n", "logs": [{"step_name": "Check pylint/6_Test.txt", "log": "##[group]Run tox\n\u001b[36;1mtox\u001b[0m\nshell: /usr/bin/sh -ex {0}\nenv:\n TOXENV: pylint\n##[endgroup]\n+ tox\nGLOB sdist-make: /home/runner/work/cloud-init/cloud-init/setup.py\npylint create: 
/home/runner/work/cloud-init/cloud-init/.tox/pylint\npylint installdeps: pylint===2.13.9, -r/home/runner/work/cloud-init/cloud-init/test-requirements.txt, -r/home/runner/work/cloud-init/cloud-init/integration-requirements.txt\npylint inst: /home/runner/work/cloud-init/cloud-init/.tox/.tmp/package/1/cloud-init-23.4+28.ge396f22f.zip\npylint installed: adal==1.2.7,appdirs==1.4.4,applicationinsights==0.11.10,argcomplete==3.1.6,astroid==2.11.7,attrs==23.1.0,autopage==0.5.2,azure-cli-core==2.55.0,azure-cli-telemetry==1.1.0,azure-common==1.1.28,azure-core==1.29.5,azure-identity==1.15.0,azure-mgmt-compute==30.3.0,azure-mgmt-core==1.4.0,azure-mgmt-network==25.1.0,azure-mgmt-resource==23.0.1,bcrypt==4.1.1,boto3==1.33.9,botocore==1.33.9,cachetools==5.3.2,certifi==2023.11.17,cffi==1.16.0,charset-normalizer==3.3.2,circuitbreaker==1.4.0,cliff==4.4.0,cloud-init==23.4+28.ge396f22f,cmd2==2.4.3,configobj==5.0.8,coverage==7.3.2,cryptography==41.0.7,debtcollector==2.5.0,decorator==5.1.1,dill==0.3.7,distro==1.8.0,dogpile.cache==1.2.2,exceptiongroup==1.2.0,google-api-core==2.14.0,google-api-python-client==2.109.0,google-auth==2.25.1,google-auth-httplib2==0.1.1,googleapis-common-protos==1.61.0,httplib2==0.22.0,humanfriendly==10.0,ibm-cloud-sdk-core==3.18.0,ibm-platform-services==0.47.1,ibm-vpc==0.20.0,idna==3.6,importlib-metadata==7.0.0,importlib-resources==6.1.1,iniconfig==2.0.0,iso8601==2.1.0,isodate==0.6.1,isort==5.12.0,Jinja2==3.1.2,jmespath==1.0.1,jsonpatch==1.33,jsonpointer==2.4,jsonschema==4.20.0,jsonschema-specifications==2023.11.2,keystoneauth1==5.4.0,knack==0.11.0,lazy-object-proxy==1.9.0,MarkupSafe==2.1.3,mccabe==0.7.0,msal==1.26.0,msal-extensions==1.0.0,msgpack==1.0.7,msrest==0.7.1,msrestazure==0.6.4,netaddr==0.9.0,netifaces==0.11.0,oauthlib==3.2.2,oci==2.117.0,openstacksdk==1.4.0,os-service-types==1.7.0,osc-lib==2.9.0,oslo.config==9.2.0,oslo.i18n==6.2.0,oslo.serialization==5.2.0,oslo.utils==6.3.0,packaging==23.2,paramiko==3.3.1,passlib==1.7.4,pbr==6.0.0,pkginfo==1.9.6,pkgutil-resolve-name==1.3.10,platformdirs==4.1.0,pluggy==1.3.0,portalocker==2.8.2,prettytable==3.9.0,protobuf==3.19.6,psutil==5.9.6,pyasn1==0.5.1,pyasn1-modules==0.3.0,pycloudlib==1!5.11.0,pycparser==2.21,pygments==2.17.2,PyJWT==2.8.0,pylint==2.13.9,PyNaCl==1.5.0,pyOpenSSL==23.3.0,pyparsing==2.4.7,pyperclip==1.8.2,pyserial==3.5,pytest==7.4.3,pytest-cov==4.1.0,pytest-mock==3.12.0,python-cinderclient==9.4.0,python-dateutil==2.8.2,python-keystoneclient==5.2.0,python-novaclient==18.4.0,python-openstackclient==6.4.0,pytz==2023.3.post1,PyYAML==6.0.1,qemu.qmp==0.0.3,referencing==0.31.1,requests==2.31.0,requests-oauthlib==1.3.1,requestsexceptions==1.4.0,responses==0.24.1,rfc3986==2.0.0,rpds-py==0.13.2,rsa==4.9,s3transfer==0.8.2,simplejson==3.19.2,six==1.16.0,stevedore==5.1.0,tabulate==0.9.0,toml==0.10.0,tomli==2.0.1,typing-extensions==4.8.0,tzdata==2023.3,uritemplate==4.1.1,urllib3==2.1.0,wcwidth==0.2.12,wrapt==1.16.0,zipp==3.17.0\npylint run-test-pre: PYTHONHASHSEED='1481622646'\npylint run-test: commands[0] | /home/runner/work/cloud-init/cloud-init/.tox/pylint/bin/python -m pylint cloudinit/ tests/ tools/ conftest.py setup.py\n************* Module tests.unittests.test_net_activators\ntests/unittests/test_net_activators.py:329: [E0213(no-self-argument), TestNetworkManagerActivatorBringUp.fake_isfile_no_nmconn] Method should have \"self\" as first argument\ntests/unittests/test_net_activators.py:330: [E1101(no-member), TestNetworkManagerActivatorBringUp.fake_isfile_no_nmconn] Instance of 'TestNetworkManagerActivatorBringUp' has no 'endswith' 
member\n\n------------------------------------\nYour code has been rated at 10.00/10\n\nERROR: InvocationError for command /home/runner/work/cloud-init/cloud-init/.tox/pylint/bin/python -m pylint cloudinit/ tests/ tools/ conftest.py setup.py (exited with code 2)\n___________________________________ summary ____________________________________\nERROR: pylint: commands failed\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/tests/unittests/test_net_activators.py b/tests/unittests/test_net_activators.py\nindex 0b3c11c1a..c14425fd6 100644\n--- a/tests/unittests/test_net_activators.py\n+++ b/tests/unittests/test_net_activators.py\n@@ -326,15 +326,12 @@ class TestActivatorsBringDown:\n \n \n class TestNetworkManagerActivatorBringUp:\n- def fake_isfile_no_nmconn(filename):\n- return False if filename.endswith(\".nmconnection\") else True\n-\n @patch(\"cloudinit.subp.subp\", return_value=(\"\", \"\"))\n @patch(\n \"cloudinit.net.network_manager.available_nm_ifcfg_rh\",\n return_value=True,\n )\n- @patch.object(os.path, \"isfile\", side_effect=fake_isfile_no_nmconn)\n+ @patch.object(os.path, \"isfile\")\n @patch(\"os.path.exists\", return_value=True)\n def test_bring_up_interface_no_nm_conn(\n self, m_exists, m_isfile, m_plugin, m_subp\n@@ -344,6 +341,12 @@ class TestNetworkManagerActivatorBringUp:\n present and ifcfg interface config files are also present. In this\n case, we should use ifcfg files.\n \"\"\"\n+\n+ def fake_isfile_no_nmconn(filename):\n+ return False if filename.endswith(\".nmconnection\") else True\n+\n+ m_isfile.side_effect = fake_isfile_no_nmconn\n+\n expected_call_list = [\n (\n (\n@@ -377,6 +380,7 @@ class TestNetworkManagerActivatorBringUp:\n {},\n ),\n ]\n+\n index = 0\n assert NetworkManagerActivator.bring_up_interface(\"eth0\")\n for call in m_subp.call_args_list:\n@@ -388,7 +392,7 @@ class TestNetworkManagerActivatorBringUp:\n \"cloudinit.net.network_manager.available_nm_ifcfg_rh\",\n return_value=False,\n )\n- @patch.object(os.path, \"isfile\", side_effect=fake_isfile_no_nmconn)\n+ @patch.object(os.path, \"isfile\")\n @patch(\"os.path.exists\", return_value=True)\n def test_bring_up_interface_no_plugin_no_nm_conn(\n self, m_exists, m_isfile, m_plugin, m_subp\n@@ -398,6 +402,11 @@ class TestNetworkManagerActivatorBringUp:\n not present. In this case, we can't use ifcfg file and the\n interface bring up should fail.\n \"\"\"\n+\n+ def fake_isfile_no_nmconn(filename):\n+ return False if filename.endswith(\".nmconnection\") else True\n+\n+ m_isfile.side_effect = fake_isfile_no_nmconn\n assert not NetworkManagerActivator.bring_up_interface(\"eth0\")\n \n @patch(\"cloudinit.subp.subp\", return_value=(\"\", \"\"))\n", "difficulty": 1, "changed_files": ["tests/unittests/test_net_activators.py"], "commit_link": "https://github.com/canonical/cloud-init/tree/55d2e8d4abb024997be878797d5625effad65d43"}
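Note: the E0213/E1101 messages above stem from fake_isfile_no_nmconn being defined at class-body scope, where pylint assumes any function is a method whose first argument must be self. The 55d2e8d fix moves the helper inside each test and attaches it through the mock's side_effect; a simplified, self-contained sketch of that shape:

import os.path
from unittest import mock

class TestNetworkManagerActivatorBringUp:
    @mock.patch.object(os.path, "isfile")
    def test_bring_up_interface_no_nm_conn(self, m_isfile):
        def fake_isfile_no_nmconn(filename):  # plain local function, no self
            return not filename.endswith(".nmconnection")

        m_isfile.side_effect = fake_isfile_no_nmconn
        assert not os.path.isfile("eth0.nmconnection")
        assert os.path.isfile("ifcfg-eth0")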
data/python/55f8e66.json ADDED
The diff for this file is too large to render. See raw diff
 
data/python/5b9b7a0.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 57, "repo_owner": "hacs", "repo_name": "integration", "head_branch": "more-proxy-tests", "workflow_name": "Lint", "workflow_filename": "lint.yaml", "workflow_path": ".github/workflows/lint.yaml", "contributor": "hacs", "sha_fail": "5b9b7a0f0f73cc0257f1b41b4904dc9056e9baa1", "sha_success": "04d2ec72235a322abe7140afa8cbbd65fa35090a", "workflow": "name: Lint\n\non:\n pull_request:\n branches:\n - main\n push:\n branches:\n - main\n\nconcurrency:\n group: lint-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n matrix:\n runs-on: ubuntu-latest\n name: Run ${{ matrix.check }}\n strategy:\n matrix:\n check:\n - pyupgrade\n - black\n - codespell\n - check-executables-have-shebangs\n - check-json\n - requirements-txt-fixer\n - check-ast\n - mixed-line-ending\n steps:\n - name: \ud83d\udce5 Checkout the repository\n uses: actions/[email protected]\n\n - name: \ud83d\udee0\ufe0f Set up Python\n uses: actions/[email protected]\n id: python\n with:\n python-version: \"3.11\"\n\n - name: \ud83d\udce6 Install pre-commit\n run: |\n scripts/install/pip_packages pre-commit\n pre-commit install-hooks --config .github/pre-commit-config.yaml\n\n - name: \ud83c\udfc3 Run the check (${{ matrix.check }})\n run: pre-commit run --hook-stage manual ${{ matrix.check }} --all-files --config .github/pre-commit-config.yaml\n\n lint-json:\n runs-on: ubuntu-latest\n name: With JQ\n steps:\n - name: \ud83d\udce5 Checkout the repository\n uses: actions/[email protected]\n\n - name: \ud83c\udfc3 Run validation\n run: jq -r -e -c . tests/fixtures/*.json", "logs": [{"step_name": "Run pyupgrade/5_\ud83c\udfc3 Run the check (pyupgrade).txt", "log": "##[group]Run pre-commit run --hook-stage manual pyupgrade --all-files --config .github/pre-commit-config.yaml\n\u001b[36;1mpre-commit run --hook-stage manual pyupgrade --all-files --config .github/pre-commit-config.yaml\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.11.7/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib\n##[endgroup]\npyupgrade................................................................Failed\n- hook id: pyupgrade\n- exit code: 1\n- files were modified by this hook\n\nRewriting tests/common.py\n\n##[error]Process completed with exit code 1.\n"}, {"step_name": "Run black/5_\ud83c\udfc3 Run the check (black).txt", "log": "##[group]Run pre-commit run --hook-stage manual black --all-files --config .github/pre-commit-config.yaml\n\u001b[36;1mpre-commit run --hook-stage manual black --all-files --config .github/pre-commit-config.yaml\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.11.7/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib\n##[endgroup]\nblack....................................................................Failed\n- hook id: black\n- files were modified by this hook\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/tests/common.py b/tests/common.py\nindex 190a2fba..ea30646a 100644\n--- 
a/tests/common.py\n+++ b/tests/common.py\n@@ -13,7 +13,13 @@ from unittest.mock import AsyncMock, Mock, patch\n from aiohttp import ClientSession, ClientWebSocketResponse\n from aiohttp.typedefs import StrOrURL\n from awesomeversion import AwesomeVersion\n-from homeassistant import auth, bootstrap, config_entries, core as ha, config as ha_config\n+from homeassistant import (\n+ auth,\n+ bootstrap,\n+ config as ha_config,\n+ config_entries,\n+ core as ha,\n+)\n from homeassistant.auth import auth_store, models as auth_models\n from homeassistant.const import (\n EVENT_HOMEASSISTANT_CLOSE,\n@@ -49,13 +55,13 @@ TOKEN = \"XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\"\n INSTANCES = []\n REQUEST_CONTEXT: ContextVar[pytest.FixtureRequest] = ContextVar(\"request_context\", default=None)\n \n-IGNORED_BASE_FILES = set([\n- \"/config/automations.yaml\",\n- \"/config/configuration.yaml\",\n- \"/config/scenes.yaml\",\n- \"/config/scripts.yaml\",\n- \"/config/secrets.yaml\",\n- ])\n+IGNORED_BASE_FILES = {\n+ \"/config/automations.yaml\",\n+ \"/config/configuration.yaml\",\n+ \"/config/scenes.yaml\",\n+ \"/config/scripts.yaml\",\n+ \"/config/secrets.yaml\",\n+}\n \n \n def safe_json_dumps(data: dict | list) -> str:\n", "difficulty": 2, "changed_files": ["tests/common.py"], "commit_link": "https://github.com/hacs/integration/tree/5b9b7a0f0f73cc0257f1b41b4904dc9056e9baa1"}
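Note: the pyupgrade rewrite in the 5b9b7a0 diff is the classic set-literal upgrade: set([...]) builds a throwaway list before the set, so the hook replaces it with the direct literal. In miniature:

IGNORED_BASE_FILES = set([      # flagged by pyupgrade
    "/config/secrets.yaml",
])

IGNORED_BASE_FILES = {          # equivalent set literal, no intermediate list
    "/config/secrets.yaml",
}

The same commit also reflows the long homeassistant import into a parenthesized multi-line form.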
data/python/5fea24b.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 48, "repo_owner": "hacs", "repo_name": "integration", "head_branch": "proxy-test", "workflow_name": "Lint", "workflow_filename": "lint.yaml", "workflow_path": ".github/workflows/lint.yaml", "contributor": "hacs", "sha_fail": "5fea24b4a3fc4952e83474db5e7dc05af9ec76f6", "sha_success": "bcf0ea6453a095804fdadff500d023a5a1e2d2c4", "workflow": "name: Lint\n\non:\n pull_request:\n branches:\n - main\n push:\n branches:\n - main\n\nconcurrency:\n group: lint-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n matrix:\n runs-on: ubuntu-latest\n name: Run ${{ matrix.check }}\n strategy:\n matrix:\n check:\n - pyupgrade\n - black\n - codespell\n - check-executables-have-shebangs\n - check-json\n - requirements-txt-fixer\n - check-ast\n - mixed-line-ending\n steps:\n - name: \ud83d\udce5 Checkout the repository\n uses: actions/[email protected]\n\n - name: \ud83d\udee0\ufe0f Set up Python\n uses: actions/[email protected]\n id: python\n with:\n python-version: \"3.11\"\n\n - name: \ud83d\udce6 Install pre-commit\n run: |\n scripts/install/pip_packages pre-commit\n pre-commit install-hooks --config .github/pre-commit-config.yaml\n\n - name: \ud83c\udfc3 Run the check (${{ matrix.check }})\n run: pre-commit run --hook-stage manual ${{ matrix.check }} --all-files --config .github/pre-commit-config.yaml\n\n lint-json:\n runs-on: ubuntu-latest\n name: With JQ\n steps:\n - name: \ud83d\udce5 Checkout the repository\n uses: actions/[email protected]\n\n - name: \ud83c\udfc3 Run validation\n run: jq -r -e -c . tests/fixtures/*.json", "logs": [{"step_name": "Run pyupgrade/5_\ud83c\udfc3 Run the check (pyupgrade).txt", "log": "##[group]Run pre-commit run --hook-stage manual pyupgrade --all-files --config .github/pre-commit-config.yaml\n\u001b[36;1mpre-commit run --hook-stage manual pyupgrade --all-files --config .github/pre-commit-config.yaml\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.11.6/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.11.6/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.6/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.6/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.6/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.11.6/x64/lib\n##[endgroup]\npyupgrade................................................................Failed\n- hook id: pyupgrade\n- exit code: 1\n- files were modified by this hook\n\nRewriting tests/repositories/test_get_hacs_json.py\nRewriting tests/repositories/test_get_documentation.py\n\n##[error]Process completed with exit code 1.\n"}, {"step_name": "Run black/5_\ud83c\udfc3 Run the check (black).txt", "log": "##[group]Run pre-commit run --hook-stage manual black --all-files --config .github/pre-commit-config.yaml\n\u001b[36;1mpre-commit run --hook-stage manual black --all-files --config .github/pre-commit-config.yaml\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.11.6/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.11.6/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.6/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.6/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.6/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.11.6/x64/lib\n##[endgroup]\nblack....................................................................Failed\n- hook id: black\n- files were modified by this hook\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git 
a/tests/fixtures/proxy/raw.githubusercontent.com/octocat/integration/1.0.0/README.md b/tests/fixtures/proxy/raw.githubusercontent.com/octocat/integration/1.0.0/README.md\nindex 547ce4ac..530ac586 100644\n--- a/tests/fixtures/proxy/raw.githubusercontent.com/octocat/integration/1.0.0/README.md\n+++ b/tests/fixtures/proxy/raw.githubusercontent.com/octocat/integration/1.0.0/README.md\n@@ -1 +1 @@\n-## Example readme file\n\\n+## Example readme file (1.0.0)\n\\ndiff --git a/tests/fixtures/proxy/raw.githubusercontent.com/octocat/integration/2.0.0/README.md b/tests/fixtures/proxy/raw.githubusercontent.com/octocat/integration/2.0.0/README.md\nindex 547ce4ac..67d48bfd 100644\n--- a/tests/fixtures/proxy/raw.githubusercontent.com/octocat/integration/2.0.0/README.md\n+++ b/tests/fixtures/proxy/raw.githubusercontent.com/octocat/integration/2.0.0/README.md\n@@ -1 +1 @@\n-## Example readme file\n\\n+## Example readme file (2.0.0)\n\\ndiff --git a/tests/repositories/test_get_documentation.py b/tests/repositories/test_get_documentation.py\nindex e289d295..0b3bcbb6 100644\n--- a/tests/repositories/test_get_documentation.py\n+++ b/tests/repositories/test_get_documentation.py\n@@ -1,19 +1,27 @@\n-\n from typing import Any\n+\n import pytest\n+\n from custom_components.hacs.base import HacsBase\n from custom_components.hacs.repositories.base import HacsRepository\n \n from tests.common import client_session_proxy\n \n \n-\[email protected](\"data,result\", [\n- ({\"installed\": True, \"installed_version\": \"1.0.0\"}, \"Example readme file\"),\n- ({\"installed\": False, \"last_version\": \"2.0.0\"}, \"Example readme file\")\n-])\[email protected](\n+ \"data,result\",\n+ [\n+ ({\"installed\": True, \"installed_version\": \"1.0.0\"}, \"Example readme file (1.0.0)\"),\n+ (\n+ {\"installed\": True, \"installed_version\": \"1.0.0\", \"last_version\": \"2.0.0\"},\n+ \"Example readme file (1.0.0)\",\n+ ),\n+ ({\"installed\": False, \"last_version\": \"2.0.0\"}, \"Example readme file (2.0.0)\"),\n+ ({\"installed\": False, \"last_version\": \"99.99.99\"}, None),\n+ ],\n+)\n @pytest.mark.asyncio\n-async def test_validate_repository(hacs: HacsBase, data: dict[str, Any], result: str):\n+async def test_validate_repository(hacs: HacsBase, data: dict[str, Any], result: str | None):\n repository = HacsRepository(hacs=hacs)\n repository.data.full_name = \"octocat/integration\"\n for key, value in data.items():\n@@ -22,4 +30,7 @@ async def test_validate_repository(hacs: HacsBase, data: dict[str, Any], result:\n hacs.session = await client_session_proxy(hacs.hass)\n docs = await repository.get_documentation(filename=\"README.md\")\n \n- assert result in docs\n+ if result:\n+ assert result in docs\n+ else:\n+ assert result is None\ndiff --git a/tests/repositories/test_get_hacs_json.py b/tests/repositories/test_get_hacs_json.py\nindex 495f346d..a1a5c2eb 100644\n--- a/tests/repositories/test_get_hacs_json.py\n+++ b/tests/repositories/test_get_hacs_json.py\n@@ -1,16 +1,12 @@\n-\n import pytest\n+\n from custom_components.hacs.base import HacsBase\n from custom_components.hacs.repositories.base import HacsRepository\n \n from tests.common import client_session_proxy\n \n \n-\[email protected](\"version,name\", [\n- (\"1.0.0\", \"Proxy integration\"),\n- (\"99.99.99\", None)\n-])\[email protected](\"version,name\", [(\"1.0.0\", \"Proxy integration\"), (\"99.99.99\", None)])\n @pytest.mark.asyncio\n async def test_validate_repository(hacs: HacsBase, version: str, name: str | None):\n repository = HacsRepository(hacs=hacs)\n", 
"difficulty": 2, "changed_files": ["tests/fixtures/proxy/raw.githubusercontent.com/octocat/integration/1.0.0/README.md", "tests/fixtures/proxy/raw.githubusercontent.com/octocat/integration/2.0.0/README.md", "tests/repositories/test_get_documentation.py", "tests/repositories/test_get_hacs_json.py"], "commit_link": "https://github.com/hacs/integration/tree/5fea24b4a3fc4952e83474db5e7dc05af9ec76f6"}
data/python/616eb3b.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 19, "repo_owner": "django-import-export", "repo_name": "django-import-export", "head_branch": "remove_count_queries", "workflow_name": ".github/workflows/pre-commit.yml", "workflow_filename": "pre-commit.yml", "workflow_path": ".github/workflows/pre-commit.yml", "contributor": "PetrDlouhy", "sha_fail": "616eb3b10db94cf4a4c209377f36b2ce995bd01c", "sha_success": "b4a4971982af72d8ca11ceed00bee819eb6f2831", "workflow": "on:\n pull_request:\n push:\n branches:\n - main\n\njobs:\n main:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n - uses: actions/setup-python@v4\n with:\n python-version: 3.x\n - uses: pre-commit/[email protected]\n - uses: pre-commit-ci/[email protected]\n if: always()\n", "logs": [{"step_name": "main/4_Run [email protected]", "log": "##[group]Run pre-commit/[email protected]\nwith:\n extra_args: --all-files\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.1/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib\n##[endgroup]\n##[group]Run python -m pip install pre-commit\n\u001b[36;1mpython -m pip install pre-commit\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.1/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib\n##[endgroup]\nCollecting pre-commit\n Downloading pre_commit-3.6.0-py2.py3-none-any.whl.metadata (1.3 kB)\nCollecting cfgv>=2.0.0 (from pre-commit)\n Downloading cfgv-3.4.0-py2.py3-none-any.whl.metadata (8.5 kB)\nCollecting identify>=1.0.0 (from pre-commit)\n Downloading identify-2.5.33-py2.py3-none-any.whl.metadata (4.4 kB)\nCollecting nodeenv>=0.11.1 (from pre-commit)\n Downloading nodeenv-1.8.0-py2.py3-none-any.whl.metadata (21 kB)\nCollecting pyyaml>=5.1 (from pre-commit)\n Downloading PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.1 kB)\nCollecting virtualenv>=20.10.0 (from pre-commit)\n Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nCollecting setuptools (from nodeenv>=0.11.1->pre-commit)\n Downloading setuptools-69.0.2-py3-none-any.whl.metadata (6.3 kB)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.10.0->pre-commit)\n Downloading distlib-0.3.8-py2.py3-none-any.whl.metadata (5.1 kB)\nCollecting filelock<4,>=3.12.2 (from virtualenv>=20.10.0->pre-commit)\n Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting platformdirs<5,>=3.9.1 (from virtualenv>=20.10.0->pre-commit)\n Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nDownloading pre_commit-3.6.0-py2.py3-none-any.whl (204 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 204.0/204.0 kB 17.6 MB/s eta 0:00:00\nDownloading cfgv-3.4.0-py2.py3-none-any.whl (7.2 kB)\nDownloading identify-2.5.33-py2.py3-none-any.whl (98 kB)\n 
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 98.9/98.9 kB 25.7 MB/s eta 0:00:00\nDownloading nodeenv-1.8.0-py2.py3-none-any.whl (22 kB)\nDownloading PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (724 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 725.0/725.0 kB 68.9 MB/s eta 0:00:00\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 79.8 MB/s eta 0:00:00\nDownloading distlib-0.3.8-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 70.8 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nDownloading setuptools-69.0.2-py3-none-any.whl (819 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 819.5/819.5 kB 80.0 MB/s eta 0:00:00\nInstalling collected packages: distlib, setuptools, pyyaml, platformdirs, identify, filelock, cfgv, virtualenv, nodeenv, pre-commit\nSuccessfully installed cfgv-3.4.0 distlib-0.3.8 filelock-3.13.1 identify-2.5.33 nodeenv-1.8.0 platformdirs-4.1.0 pre-commit-3.6.0 pyyaml-6.0.1 setuptools-69.0.2 virtualenv-20.25.0\n##[group]Run python -m pip freeze --local\n\u001b[36;1mpython -m pip freeze --local\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.1/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib\n##[endgroup]\ncfgv==3.4.0\ndistlib==0.3.8\nfilelock==3.13.1\nidentify==2.5.33\nnodeenv==1.8.0\nplatformdirs==4.1.0\npre-commit==3.6.0\nPyYAML==6.0.1\nsetuptools==69.0.2\nvirtualenv==20.25.0\n##[group]Run actions/cache@v3\nwith:\n path: ~/.cache/pre-commit\n key: pre-commit-3|/opt/hostedtoolcache/Python/3.12.1/x64|78083b3309c934e46e7d0a952b7a4c6dc6df8860edaa74474d06dd8c36e1b0a2\n enableCrossOsArchive: false\n fail-on-cache-miss: false\n lookup-only: false\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.1/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib\n##[endgroup]\nCache Size: ~15 MB (15847689 
B)\n[command]/usr/bin/tar -xf /home/runner/work/_temp/0406f221-9418-4459-ac5c-6e0b0097f5b8/cache.tzst -P -C /home/runner/work/django-import-export/django-import-export --use-compress-program unzstd\nCache restored successfully\nCache restored from key: pre-commit-3|/opt/hostedtoolcache/Python/3.12.1/x64|78083b3309c934e46e7d0a952b7a4c6dc6df8860edaa74474d06dd8c36e1b0a2\n##[group]Run pre-commit run --show-diff-on-failure --color=always --all-files\n\u001b[36;1mpre-commit run --show-diff-on-failure --color=always --all-files\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.1/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.1/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.1/x64/lib\n##[endgroup]\nblack....................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: black\u001b[m\n\u001b[2m- files were modified by this hook\u001b[m\n\n\u001b[1mreformatted tests/core/tests/test_admin_integration.py\u001b[0m\n\n\u001b[1mAll done! \u2728 \ud83c\udf70 \u2728\u001b[0m\n\u001b[34m\u001b[1m1 file \u001b[0m\u001b[1mreformatted\u001b[0m, \u001b[34m56 files \u001b[0mleft unchanged.\n\nisort....................................................................\u001b[42mPassed\u001b[m\nflake8...................................................................\u001b[42mPassed\u001b[m\npre-commit hook(s) made changes.\nIf you are seeing this message in CI, reproduce locally with: `pre-commit run --all-files`.\nTo run `pre-commit` as part of git workflow, use `pre-commit install`.\nAll changes made by hooks:\n\u001b[1mdiff --git a/tests/core/tests/test_admin_integration.py b/tests/core/tests/test_admin_integration.py\u001b[m\n\u001b[1mindex c178c89..e74a7d2 100644\u001b[m\n\u001b[1m--- a/tests/core/tests/test_admin_integration.py\u001b[m\n\u001b[1m+++ b/tests/core/tests/test_admin_integration.py\u001b[m\n\u001b[36m@@ -680,7 +680,9 @@\u001b[m \u001b[mclass ExportAdminIntegrationTest(AdminTestMixin, TestCase):\u001b[m\n \"file_format\": \"0\",\u001b[m\n }\u001b[m\n date_str = datetime.now().strftime(\"%Y-%m-%d\")\u001b[m\n\u001b[31m- with self.assertNumQueries(7): # Should not contain COUNT queries from ModelAdmin.get_results()\u001b[m\n\u001b[32m+\u001b[m\u001b[32m with self.assertNumQueries(\u001b[m\n\u001b[32m+\u001b[m\u001b[32m 7\u001b[m\n\u001b[32m+\u001b[m\u001b[32m ): # Should not contain COUNT queries from ModelAdmin.get_results()\u001b[m\n response = self.client.post(\"/admin/core/book/export/\", data)\u001b[m\n self.assertEqual(response.status_code, 200)\u001b[m\n self.assertTrue(response.has_header(\"Content-Disposition\"))\u001b[m\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/tests/core/tests/test_admin_integration.py b/tests/core/tests/test_admin_integration.py\nindex c178c89e..e74a7d29 100644\n--- a/tests/core/tests/test_admin_integration.py\n+++ b/tests/core/tests/test_admin_integration.py\n@@ -680,7 +680,9 @@ class ExportAdminIntegrationTest(AdminTestMixin, TestCase):\n \"file_format\": \"0\",\n }\n date_str = datetime.now().strftime(\"%Y-%m-%d\")\n- with self.assertNumQueries(7): # Should not contain COUNT queries from ModelAdmin.get_results()\n+ with self.assertNumQueries(\n+ 7\n+ ): # Should not contain COUNT queries from 
ModelAdmin.get_results()\n response = self.client.post(\"/admin/core/book/export/\", data)\n self.assertEqual(response.status_code, 200)\n self.assertTrue(response.has_header(\"Content-Disposition\"))\n", "difficulty": 0, "changed_files": ["tests/core/tests/test_admin_integration.py"], "commit_link": "https://github.com/django-import-export/django-import-export/tree/616eb3b10db94cf4a4c209377f36b2ce995bd01c"}
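Note: the only change black demanded in 616eb3b is the wrap shown above; the inline comment pushes the with statement past black's line-length limit (88 columns by default), and black can only break inside the call's parentheses. The after-state, excerpted from the diff:

with self.assertNumQueries(
    7
):  # Should not contain COUNT queries from ModelAdmin.get_results()
    ...

Placing the comment on its own line above the statement would also have satisfied black, but the recorded fix keeps it inline.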
data/python/63ae862.json ADDED
The diff for this file is too large to render. See raw diff
 
data/python/655e964.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 44, "repo_owner": "fortra", "repo_name": "impacket", "head_branch": "configurable_socks_address", "workflow_name": "Build and test Impacket", "workflow_filename": "build_and_test.yml", "workflow_path": ".github/workflows/build_and_test.yml", "contributor": "rtpt-erikgeiser", "sha_fail": "655e964d0122833acd1f34aca8844e3db3dc5583", "sha_success": "72df5fb660a36f0638a004aeb139995434a5f585", "workflow": "# GitHub Action workflow to build and run Impacket's tests\n#\n\nname: Build and test Impacket\n\non: [push, pull_request]\n\nenv:\n DOCKER_TAG: impacket:latests\n\njobs:\n lint:\n name: Check syntax errors and warnings\n runs-on: ubuntu-latest\n if:\n github.event_name == 'push' || github.event.pull_request.head.repo.full_name !=\n github.repository\n\n steps:\n - name: Checkout Impacket\n uses: actions/checkout@v3\n\n - name: Setup Python 3.8\n uses: actions/setup-python@v4\n with:\n python-version: 3.8\n\n - name: Install Python dependencies\n run: |\n python -m pip install flake8\n\n - name: Check syntax errors\n run: |\n flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics\n\n - name: Check PEP8 warnings\n run: |\n flake8 . --count --ignore=E1,E2,E3,E501,W291,W293 --exit-zero --max-complexity=65 --max-line-length=127 --statistics\n\n test:\n name: Run unit tests and build wheel\n needs: lint\n runs-on: ${{ matrix.os }}\n if:\n github.event_name == 'push' || github.event.pull_request.head.repo.full_name !=\n github.repository\n\n strategy:\n fail-fast: false\n matrix:\n python-version: [\"3.7\", \"3.8\", \"3.9\", \"3.10\"]\n experimental: [false]\n os: [ubuntu-latest]\n include:\n - python-version: \"3.6\"\n experimental: false\n os: ubuntu-20.04\n - python-version: \"3.11-dev\"\n experimental: true\n os: ubuntu-latest\n continue-on-error: ${{ matrix.experimental }}\n\n steps:\n - name: Checkout Impacket\n uses: actions/checkout@v3\n\n - name: Setup Python ${{ matrix.python-version }}\n uses: actions/setup-python@v4\n with:\n python-version: ${{ matrix.python-version }}\n\n - name: Install Python dependencies\n run: |\n python -m pip install --upgrade pip wheel\n python -m pip install tox tox-gh-actions -r requirements.txt -r requirements-test.txt\n\n - name: Run unit tests\n run: |\n tox -- -m 'not remote'\n\n - name: Build wheel artifact\n run: |\n python setup.py bdist_wheel\n\n docker:\n name: Build docker image\n needs: lint\n runs-on: ubuntu-latest\n if:\n github.event_name == 'push' || github.event.pull_request.head.repo.full_name !=\n github.repository\n\n continue-on-error: true\n steps:\n - name: Checkout Impacket\n uses: actions/checkout@v3\n\n - name: Build docker image\n run: |\n docker build -t ${{ env.DOCKER_TAG }} .\n", "logs": [{"step_name": "Check syntax errors and warnings/5_Check syntax errors.txt", "log": "##[group]Run flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics\n\u001b[36;1mflake8 . 
--count --select=E9,F63,F7,F82 --show-source --statistics\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n DOCKER_TAG: impacket:latests\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\n./impacket/examples/ntlmrelayx/servers/socksserver.py:463:18: E999 SyntaxError: non-default argument follows default argument\n def __init__(self, server_address=('127.0.0.1', 1080), handler_class=SocksRequestHandler, api_port):\n ^\n1 E999 SyntaxError: non-default argument follows default argument\n1\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/impacket/examples/ntlmrelayx/servers/socksserver.py b/impacket/examples/ntlmrelayx/servers/socksserver.py\nindex 53f8cd5b..43bf6c63 100644\n--- a/impacket/examples/ntlmrelayx/servers/socksserver.py\n+++ b/impacket/examples/ntlmrelayx/servers/socksserver.py\n@@ -460,7 +460,7 @@ class SocksRequestHandler(socketserver.BaseRequestHandler):\n \n \n class SOCKS(socketserver.ThreadingMixIn, socketserver.TCPServer):\n- def __init__(self, server_address=('127.0.0.1', 1080), handler_class=SocksRequestHandler, api_port):\n+ def __init__(self, server_address=('127.0.0.1', 1080), handler_class=SocksRequestHandler, api_port=9090):\n LOG.info('SOCKS proxy started. Listening on %s:%d', server_address[0], server_address[1])\n \n self.activeRelays = {}\n", "difficulty": 2, "changed_files": ["impacket/examples/ntlmrelayx/servers/socksserver.py"], "commit_link": "https://github.com/fortra/impacket/tree/655e964d0122833acd1f34aca8844e3db3dc5583"}
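Note: the E999 above is a hard SyntaxError, not a style issue: Python rejects a positional parameter without a default (api_port) after parameters that have one, so flake8's error-only pass fails. The 655e964 fix gives the trailing parameter a default; a keyword-only parameter would also have been legal, though the diff did not take that route. Sketch (serve is a stand-in name for the SOCKS server's __init__):

# def serve(server_address=("127.0.0.1", 1080), api_port):  # E999 SyntaxError
def serve(server_address=("127.0.0.1", 1080), api_port=9090):  # fix taken in the diff
    return server_address, api_port

def serve_kw(server_address=("127.0.0.1", 1080), *, api_port):  # legal alternative
    return server_address, api_port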
data/python/66e3eac.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 64, "repo_owner": "encode", "repo_name": "uvicorn", "head_branch": "dependabot/pip/python-packages-6ae2cf5cd5", "workflow_name": "Test Suite", "workflow_filename": "test-suite.yml", "workflow_path": ".github/workflows/test-suite.yml", "contributor": "encode", "sha_fail": "66e3eac43e68ce4632670d4addd75cfc4c8de0a1", "sha_success": "b5084dc6bf7b04b9f06283baab794cdafcfe0931", "workflow": "---\nname: Test Suite\n\non:\n push:\n branches: [\"master\"]\n pull_request:\n branches: [\"master\"]\n\njobs:\n tests:\n name: \"Python ${{ matrix.python-version }} ${{ matrix.os }}\"\n runs-on: \"${{ matrix.os }}\"\n strategy:\n matrix:\n python-version: [\"3.8\", \"3.9\", \"3.10\", \"3.11\", \"3.12\"]\n os: [windows-latest, ubuntu-latest, macos-latest]\n steps:\n - uses: \"actions/checkout@v4\"\n - uses: \"actions/setup-python@v5\"\n with:\n python-version: \"${{ matrix.python-version }}\"\n - name: \"Install dependencies\"\n run: \"scripts/install\"\n shell: bash\n - name: \"Run linting checks\"\n run: \"scripts/check\"\n shell: bash\n if: \"${{ matrix.os == 'ubuntu-latest'}}\"\n - name: \"Build package & docs\"\n run: \"scripts/build\"\n shell: bash\n - name: \"Run tests\"\n run: \"scripts/test\"\n shell: bash\n - name: \"Enforce coverage\"\n run: \"scripts/coverage\"\n shell: bash\n", "logs": [{"step_name": "Python 3.8 ubuntu-latest/5_Run linting checks.txt", "log": "##[group]Run scripts/check\n\u001b[36;1mscripts/check\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\n+ ./scripts/sync-version\n+ ruff format --check --diff uvicorn tests\n66 files already formatted\n+ mypy uvicorn tests\ntests/middleware/test_wsgi.py:53: error: List item 1 has incompatible type \"Type[WSGIMiddleware]\"; expected \"Type[object]\" [list-item]\nFound 1 error in 1 file (checked 66 source files)\n##[error]Process completed with exit code 1.\n"}, {"step_name": "Python 3.9 ubuntu-latest/5_Run linting checks.txt", "log": "##[group]Run scripts/check\n\u001b[36;1mscripts/check\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\n+ ./scripts/sync-version\n+ ruff format --check --diff uvicorn tests\n66 files already formatted\n+ mypy uvicorn tests\ntests/middleware/test_wsgi.py:53: error: List item 1 has incompatible type \"type[WSGIMiddleware]\"; expected \"type[object]\" [list-item]\nFound 1 error in 1 file (checked 66 source files)\n##[error]Process completed with exit code 1.\n"}, {"step_name": "Python 3.10 ubuntu-latest/5_Run linting checks.txt", "log": "##[group]Run scripts/check\n\u001b[36;1mscripts/check\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.10.13/x64\n PKG_CONFIG_PATH: 
/opt/hostedtoolcache/Python/3.10.13/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib\n##[endgroup]\n+ ./scripts/sync-version\n+ ruff format --check --diff uvicorn tests\n66 files already formatted\n+ mypy uvicorn tests\ntests/middleware/test_wsgi.py:53: error: List item 1 has incompatible type \"type[WSGIMiddleware]\"; expected \"type[object]\" [list-item]\nFound 1 error in 1 file (checked 66 source files)\n##[error]Process completed with exit code 1.\n"}, {"step_name": "Python 3.11 ubuntu-latest/5_Run linting checks.txt", "log": "##[group]Run scripts/check\n\u001b[36;1mscripts/check\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.11.7/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib\n##[endgroup]\n+ ./scripts/sync-version\n+ ruff format --check --diff uvicorn tests\n66 files already formatted\n+ mypy uvicorn tests\ntests/middleware/test_wsgi.py:53: error: List item 1 has incompatible type \"type[WSGIMiddleware]\"; expected \"type[object]\" [list-item]\nFound 1 error in 1 file (checked 66 source files)\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/tests/middleware/test_wsgi.py b/tests/middleware/test_wsgi.py\nindex 34730ec..00ff40c 100644\n--- a/tests/middleware/test_wsgi.py\n+++ b/tests/middleware/test_wsgi.py\n@@ -50,7 +50,7 @@ def return_exc_info(environ: Environ, start_response: StartResponse) -> List[byt\n return [output]\n \n \[email protected](params=[wsgi._WSGIMiddleware, a2wsgi.WSGIMiddleware])\[email protected](params=[wsgi._WSGIMiddleware, a2wsgi.WSGIMiddleware]) # type: ignore\n def wsgi_middleware(request: pytest.FixtureRequest) -> Callable:\n return request.param\n \n", "difficulty": 1, "changed_files": ["tests/middleware/test_wsgi.py"], "commit_link": "https://github.com/encode/uvicorn/tree/66e3eac43e68ce4632670d4addd75cfc4c8de0a1"}
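Note: the mypy error in 66e3eac concerns the element type inferred for the params list handed to pytest.fixture, and the recorded fix simply silences that one line rather than re-typing the list. A stripped-down version of the fixture with the ignore in place (the two middleware classes here are stand-ins, not the real ones):

import pytest

class _MiddlewareA: ...
class _MiddlewareB: ...

@pytest.fixture(params=[_MiddlewareA, _MiddlewareB])  # type: ignore
def wsgi_middleware(request: pytest.FixtureRequest):
    return request.param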
data/python/6819090.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 31, "repo_owner": "pymc-devs", "repo_name": "pymc", "head_branch": "ruff_linter", "workflow_name": "pre-commit", "workflow_filename": "pre-commit.yml", "workflow_path": ".github/workflows/pre-commit.yml", "contributor": "juanitorduz", "sha_fail": "6819090f9d2e65d80b6a257d63ea8bdea4900689", "sha_success": "c58501bed6eec60ded25671e28e627897d6bdc14", "workflow": "name: pre-commit\n\non:\n pull_request:\n push:\n branches: [main]\n\njobs:\n pre-commit:\n runs-on: ubuntu-latest\n env:\n SKIP: no-commit-to-branch\n steps:\n - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11\n - uses: actions/setup-python@v5\n with:\n python-version: \"3.9\" # Run pre-commit on oldest supported Python version\n - uses: pre-commit/[email protected]\n mypy:\n runs-on: ubuntu-latest\n defaults:\n run:\n shell: bash -l {0}\n steps:\n - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11\n - name: Cache conda\n uses: actions/cache@v3\n env:\n # Increase this value to reset cache if environment-test.yml has not changed\n CACHE_NUMBER: 0\n with:\n path: ~/conda_pkgs_dir\n key: ${{ runner.os }}-py39-conda-${{ env.CACHE_NUMBER }}-${{\n hashFiles('conda-envs/environment-test.yml') }}\n - name: Cache multiple paths\n uses: actions/cache@v3\n env:\n # Increase this value to reset cache if requirements.txt has not changed\n CACHE_NUMBER: 0\n with:\n path: |\n ~/.cache/pip\n $RUNNER_TOOL_CACHE/Python/*\n ~\\AppData\\Local\\pip\\Cache\n key: ${{ runner.os }}-build-${{ matrix.python-version }}-${{ env.CACHE_NUMBER }}-${{\n hashFiles('requirements.txt') }}\n - uses: conda-incubator/setup-miniconda@v2\n with:\n miniforge-variant: Mambaforge\n miniforge-version: latest\n mamba-version: \"*\"\n activate-environment: pymc-test\n channel-priority: strict\n environment-file: conda-envs/environment-test.yml\n python-version: \"3.9\" # Run pre-commit on oldest supported Python version\n use-mamba: true\n use-only-tar-bz2: false # IMPORTANT: This may break caching of conda packages! 
See https://github.com/conda-incubator/setup-miniconda/issues/267\n - name: Install-pymc and mypy dependencies\n run: |\n conda activate pymc-test\n pip install -e .\n pip install --pre -U polyagamma\n python --version\n - name: Run mypy\n run: |\n conda activate pymc-test\n python ./scripts/run_mypy.py --verbose\n", "logs": [{"step_name": "pre-commit/4_Run [email protected]", "log": "##[group]Run pre-commit/[email protected]\nwith:\n extra_args: --all-files\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\n##[group]Run python -m pip install pre-commit\n\u001b[36;1mpython -m pip install pre-commit\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\nCollecting pre-commit\n Downloading pre_commit-3.6.0-py2.py3-none-any.whl.metadata (1.3 kB)\nCollecting cfgv>=2.0.0 (from pre-commit)\n Downloading cfgv-3.4.0-py2.py3-none-any.whl.metadata (8.5 kB)\nCollecting identify>=1.0.0 (from pre-commit)\n Downloading identify-2.5.33-py2.py3-none-any.whl.metadata (4.4 kB)\nCollecting nodeenv>=0.11.1 (from pre-commit)\n Downloading nodeenv-1.8.0-py2.py3-none-any.whl.metadata (21 kB)\nCollecting pyyaml>=5.1 (from pre-commit)\n Downloading PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.1 kB)\nCollecting virtualenv>=20.10.0 (from pre-commit)\n Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nRequirement already satisfied: setuptools in /opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages (from nodeenv>=0.11.1->pre-commit) (58.1.0)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.10.0->pre-commit)\n Downloading distlib-0.3.8-py2.py3-none-any.whl.metadata (5.1 kB)\nCollecting filelock<4,>=3.12.2 (from virtualenv>=20.10.0->pre-commit)\n Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting platformdirs<5,>=3.9.1 (from virtualenv>=20.10.0->pre-commit)\n Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nDownloading pre_commit-3.6.0-py2.py3-none-any.whl (204 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 204.0/204.0 kB 15.0 MB/s eta 0:00:00\nDownloading cfgv-3.4.0-py2.py3-none-any.whl (7.2 kB)\nDownloading identify-2.5.33-py2.py3-none-any.whl (98 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 98.9/98.9 kB 29.3 MB/s eta 0:00:00\nDownloading nodeenv-1.8.0-py2.py3-none-any.whl (22 kB)\nDownloading 
PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (738 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 738.9/738.9 kB 62.7 MB/s eta 0:00:00\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 94.8 MB/s eta 0:00:00\nDownloading distlib-0.3.8-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 74.7 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nInstalling collected packages: distlib, pyyaml, platformdirs, nodeenv, identify, filelock, cfgv, virtualenv, pre-commit\nSuccessfully installed cfgv-3.4.0 distlib-0.3.8 filelock-3.13.1 identify-2.5.33 nodeenv-1.8.0 platformdirs-4.1.0 pre-commit-3.6.0 pyyaml-6.0.1 virtualenv-20.25.0\n\n[notice] A new release of pip is available: 23.0.1 -> 23.3.2\n[notice] To update, run: pip install --upgrade pip\n##[group]Run python -m pip freeze --local\n\u001b[36;1mpython -m pip freeze --local\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\ncfgv==3.4.0\ndistlib==0.3.8\nfilelock==3.13.1\nidentify==2.5.33\nnodeenv==1.8.0\nplatformdirs==4.1.0\npre-commit==3.6.0\nPyYAML==6.0.1\nvirtualenv==20.25.0\n##[group]Run actions/cache@v3\nwith:\n path: ~/.cache/pre-commit\n key: pre-commit-3|/opt/hostedtoolcache/Python/3.9.18/x64|85a42e0c0d7ae6174d79f49ec88a69ec255502eb51f27814478804e4cfd430f9\n enableCrossOsArchive: false\n fail-on-cache-miss: false\n lookup-only: false\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\nCache Size: ~74 MB (77731725 B)\n[command]/usr/bin/tar -xf /home/runner/work/_temp/64e9f7f2-65c6-4b3f-bffb-599fde6080b8/cache.tzst -P -C /home/runner/work/pymc/pymc --use-compress-program unzstd\nReceived 77731725 of 77731725 (100.0%), 74.0 MBs/sec\nCache restored successfully\nCache restored from key: pre-commit-3|/opt/hostedtoolcache/Python/3.9.18/x64|85a42e0c0d7ae6174d79f49ec88a69ec255502eb51f27814478804e4cfd430f9\n##[group]Run pre-commit run --show-diff-on-failure --color=always --all-files\n\u001b[36;1mpre-commit run --show-diff-on-failure --color=always --all-files\u001b[0m\nshell: 
/usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\ncheck for merge conflicts............................................................\u001b[42mPassed\u001b[m\ncheck toml...........................................................................\u001b[42mPassed\u001b[m\ncheck yaml...........................................................................\u001b[42mPassed\u001b[m\ndebug statements (python)............................................................\u001b[42mPassed\u001b[m\nfix end of files.....................................................................\u001b[42mPassed\u001b[m\ndon't commit to branch..............................................................\u001b[43;30mSkipped\u001b[m\nfix requirements.txt.................................................................\u001b[42mPassed\u001b[m\ntrim trailing whitespace.............................................................\u001b[42mPassed\u001b[m\nApply Apache 2.0 License.............................................................\u001b[42mPassed\u001b[m\npyupgrade............................................................................\u001b[42mPassed\u001b[m\nruff.................................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: ruff\u001b[m\n\u001b[2m- files were modified by this hook\u001b[m\n\nFound 5 errors (5 fixed, 0 remaining).\n\nruff-format..........................................................................\u001b[42mPassed\u001b[m\npydocstyle...........................................................................\u001b[42mPassed\u001b[m\nDisallow print statements............................................................\u001b[42mPassed\u001b[m\nCheck no tests are ignored...........................................................\u001b[42mPassed\u001b[m\nGenerate pip dependency from conda...................................................\u001b[42mPassed\u001b[m\nNo relative imports..................................................................\u001b[42mPassed\u001b[m\nCheck no links that should be cross-references are in the docs.......................\u001b[42mPassed\u001b[m\npre-commit hook(s) made changes.\nIf you are seeing this message in CI, reproduce locally with: `pre-commit run --all-files`.\nTo run `pre-commit` as part of git workflow, use `pre-commit install`.\nAll changes made by hooks:\n\u001b[1mdiff --git a/docs/source/conf.py b/docs/source/conf.py\u001b[m\n\u001b[1mindex 9ef1d52..4dba6d9 100755\u001b[m\n\u001b[1m--- a/docs/source/conf.py\u001b[m\n\u001b[1m+++ b/docs/source/conf.py\u001b[m\n\u001b[36m@@ -16,6 +16,7 @@\u001b[m\n # serve to show the default.\u001b[m\n \u001b[m\n import os\u001b[m\n\u001b[32m+\u001b[m\n from pathlib import Path\u001b[m\n \u001b[m\n import pymc\u001b[m\n\u001b[1mdiff --git a/pymc/distributions/multivariate.py b/pymc/distributions/multivariate.py\u001b[m\n\u001b[1mindex 570c139..03ea4f3 100644\u001b[m\n\u001b[1m--- a/pymc/distributions/multivariate.py\u001b[m\n\u001b[1m+++ b/pymc/distributions/multivariate.py\u001b[m\n\u001b[36m@@ -30,9 +30,8 @@\u001b[m \u001b[mfrom 
pytensor.graph.op import Op\u001b[m\n from pytensor.raise_op import Assert\u001b[m\n from pytensor.sparse.basic import sp_sum\u001b[m\n from pytensor.tensor import TensorConstant, gammaln, sigmoid\u001b[m\n\u001b[31m-from pytensor.tensor.linalg import cholesky, det, eigh\u001b[m\n\u001b[32m+\u001b[m\u001b[32mfrom pytensor.tensor.linalg import cholesky, det, eigh, solve_triangular, trace\u001b[m\n from pytensor.tensor.linalg import inv as matrix_inverse\u001b[m\n\u001b[31m-from pytensor.tensor.linalg import solve_triangular, trace\u001b[m\n from pytensor.tensor.random.basic import dirichlet, multinomial, multivariate_normal\u001b[m\n from pytensor.tensor.random.op import RandomVariable\u001b[m\n from pytensor.tensor.random.utils import (\u001b[m\n\u001b[1mdiff --git a/pymc/distributions/transforms.py b/pymc/distributions/transforms.py\u001b[m\n\u001b[1mindex 3e2a5a0..7c0d971 100644\u001b[m\n\u001b[1m--- a/pymc/distributions/transforms.py\u001b[m\n\u001b[1m+++ b/pymc/distributions/transforms.py\u001b[m\n\u001b[36m@@ -18,6 +18,7 @@\u001b[m \u001b[mfrom functools import singledispatch\u001b[m\n import numpy as np\u001b[m\n import pytensor.tensor as pt\u001b[m\n \u001b[m\n\u001b[32m+\u001b[m\n # ignore mypy error because it somehow considers that\u001b[m\n # \"numpy.core.numeric has no attribute normalize_axis_tuple\"\u001b[m\n from numpy.core.numeric import normalize_axis_tuple # type: ignore\u001b[m\n\u001b[1mdiff --git a/pymc/math.py b/pymc/math.py\u001b[m\n\u001b[1mindex ddfcaa4..7c9ceaa 100644\u001b[m\n\u001b[1m--- a/pymc/math.py\u001b[m\n\u001b[1m+++ b/pymc/math.py\u001b[m\n\u001b[36m@@ -27,7 +27,6 @@\u001b[m \u001b[mimport scipy.sparse\u001b[m\n \u001b[m\n from pytensor.graph.basic import Apply\u001b[m\n from pytensor.graph.op import Op\u001b[m\n\u001b[31m-\u001b[m\n from pytensor.tensor import (\u001b[m\n abs,\u001b[m\n and_,\u001b[m\n\u001b[36m@@ -98,7 +97,6 @@\u001b[m \u001b[mfrom scipy.linalg import block_diag as scipy_block_diag\u001b[m\n \u001b[m\n from pymc.pytensorf import floatX, ix_, largest_common_dtype\u001b[m\n \u001b[m\n\u001b[31m-\u001b[m\n __all__ = [\u001b[m\n \"abs\",\u001b[m\n \"and_\",\u001b[m\n\u001b[1mdiff --git a/tests/distributions/test_mixture.py b/tests/distributions/test_mixture.py\u001b[m\n\u001b[1mindex 8dfa5a8..be3e1bc 100644\u001b[m\n\u001b[1m--- a/tests/distributions/test_mixture.py\u001b[m\n\u001b[1m+++ b/tests/distributions/test_mixture.py\u001b[m\n\u001b[36m@@ -14,7 +14,6 @@\u001b[m\n \u001b[m\n import warnings\u001b[m\n \u001b[m\n\u001b[31m-\u001b[m\n import numpy as np\u001b[m\n import pytensor\u001b[m\n import pytest\u001b[m\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/docs/source/conf.py b/docs/source/conf.py\nindex 9ef1d527a..4dba6d9f7 100755\n--- a/docs/source/conf.py\n+++ b/docs/source/conf.py\n@@ -16,6 +16,7 @@\n # serve to show the default.\n \n import os\n+\n from pathlib import Path\n \n import pymc\ndiff --git a/pymc/distributions/multivariate.py b/pymc/distributions/multivariate.py\nindex 570c13988..03ea4f3a5 100644\n--- a/pymc/distributions/multivariate.py\n+++ b/pymc/distributions/multivariate.py\n@@ -30,9 +30,8 @@ from pytensor.graph.op import Op\n from pytensor.raise_op import Assert\n from pytensor.sparse.basic import sp_sum\n from pytensor.tensor import TensorConstant, gammaln, sigmoid\n-from pytensor.tensor.linalg import cholesky, det, eigh\n+from pytensor.tensor.linalg import cholesky, det, eigh, solve_triangular, trace\n from pytensor.tensor.linalg import inv as matrix_inverse\n-from 
pytensor.tensor.linalg import solve_triangular, trace\n from pytensor.tensor.random.basic import dirichlet, multinomial, multivariate_normal\n from pytensor.tensor.random.op import RandomVariable\n from pytensor.tensor.random.utils import (\ndiff --git a/pymc/distributions/transforms.py b/pymc/distributions/transforms.py\nindex 3e2a5a069..7c0d97100 100644\n--- a/pymc/distributions/transforms.py\n+++ b/pymc/distributions/transforms.py\n@@ -18,6 +18,7 @@ from functools import singledispatch\n import numpy as np\n import pytensor.tensor as pt\n \n+\n # ignore mypy error because it somehow considers that\n # \"numpy.core.numeric has no attribute normalize_axis_tuple\"\n from numpy.core.numeric import normalize_axis_tuple # type: ignore\ndiff --git a/pymc/math.py b/pymc/math.py\nindex ddfcaa44d..7c9ceaa9e 100644\n--- a/pymc/math.py\n+++ b/pymc/math.py\n@@ -27,7 +27,6 @@ import scipy.sparse\n \n from pytensor.graph.basic import Apply\n from pytensor.graph.op import Op\n-\n from pytensor.tensor import (\n abs,\n and_,\n@@ -98,7 +97,6 @@ from scipy.linalg import block_diag as scipy_block_diag\n \n from pymc.pytensorf import floatX, ix_, largest_common_dtype\n \n-\n __all__ = [\n \"abs\",\n \"and_\",\ndiff --git a/tests/distributions/test_mixture.py b/tests/distributions/test_mixture.py\nindex 8dfa5a8ca..be3e1bcbd 100644\n--- a/tests/distributions/test_mixture.py\n+++ b/tests/distributions/test_mixture.py\n@@ -14,7 +14,6 @@\n \n import warnings\n \n-\n import numpy as np\n import pytensor\n import pytest\n", "difficulty": 0, "changed_files": ["docs/source/conf.py", "pymc/distributions/multivariate.py", "pymc/distributions/transforms.py", "pymc/math.py", "tests/distributions/test_mixture.py"], "commit_link": "https://github.com/pymc-devs/pymc/tree/6819090f9d2e65d80b6a257d63ea8bdea4900689"}
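The pymc datapoint is fixed purely by letting ruff's import sorter rewrite the files: blank-line normalization around import blocks and, in multivariate.py, collapsing the split "from pytensor.tensor.linalg import ..." lines into one sorted line plus the aliased import on its own line. A hedged illustration of that I001 fix class, using stdlib modules instead of pytensor so it runs anywhere:

# Before, as ruff/isort would flag it (I001, un-sorted/un-merged block):
#   from os.path import basename, dirname
#   from os.path import join as pathjoin
#   from os.path import splitext, isfile
# After `ruff check --fix`, mirroring the multivariate.py hunk above:
# plain names merged and sorted on one line, the aliased import kept separate.
from os.path import basename, dirname, isfile, splitext
from os.path import join as pathjoin

p = pathjoin(dirname(__file__), basename(__file__))
print(isfile(p), splitext(p)[1])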
data/python/68ddb25.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 22, "repo_owner": "huggingface", "repo_name": "diffusers", "head_branch": "refactor-single-file", "workflow_name": "Run code quality checks", "workflow_filename": "pr_quality.yml", "workflow_path": ".github/workflows/pr_quality.yml", "contributor": "huggingface", "sha_fail": "68ddb2559e616656301858d441a523ebd64a710f", "sha_success": "7395283f23a66b634d0d316fdceadf17ed5f4f97", "workflow": "name: Run code quality checks\n\non:\n pull_request:\n branches:\n - main\n push:\n branches:\n - main\n\nconcurrency:\n group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}\n cancel-in-progress: true\n\njobs:\n check_code_quality:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n - name: Set up Python\n uses: actions/setup-python@v4\n with:\n python-version: \"3.8\"\n - name: Install dependencies\n run: |\n python -m pip install --upgrade pip\n pip install .[quality]\n - name: Check quality\n run: |\n ruff check examples tests src utils scripts\n ruff format examples tests src utils scripts --check\n\n check_repository_consistency:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v3\n - name: Set up Python\n uses: actions/setup-python@v4\n with:\n python-version: \"3.8\"\n - name: Install dependencies\n run: |\n python -m pip install --upgrade pip\n pip install .[quality]\n - name: Check quality\n run: |\n python utils/check_copies.py\n python utils/check_dummies.py\n make deps_table_check_updated\n", "logs": [{"step_name": "check_code_quality/5_Check quality.txt", "log": "##[group]Run ruff check examples tests src utils scripts\n\u001b[36;1mruff check examples tests src utils scripts\u001b[0m\n\u001b[36;1mruff format examples tests src utils scripts --check\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\nsrc/diffusers/loaders/single_file_utils.py:23:8: F401 [*] `torch` imported but unused\nsrc/diffusers/loaders/single_file_utils.py:25:44: F401 [*] `safetensors.torch.load_file` imported but unused\nFound 2 errors.\n[*] 2 fixable with the `--fix` option.\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/src/diffusers/loaders/single_file_utils.py b/src/diffusers/loaders/single_file_utils.py\nindex 55b438b036..4dc4c71013 100644\n--- a/src/diffusers/loaders/single_file_utils.py\n+++ b/src/diffusers/loaders/single_file_utils.py\n@@ -20,9 +20,7 @@ from contextlib import nullcontext\n from io import BytesIO\n \n import requests\n-import torch\n import yaml\n-from safetensors.torch import load_file as safe_load\n from transformers import (\n CLIPTextConfig,\n CLIPTextModel,\n@@ -1117,7 +1115,9 @@ def create_text_encoders_and_tokenizers_from_ldm(\n elif model_type == \"FrozenCLIPEmbedder\":\n try:\n config_name = \"openai/clip-vit-large-patch14\"\n- text_encoder = create_text_encoder_from_ldm_clip_checkpoint(config_name, checkpoint, local_files_only=local_files_only)\n+ text_encoder = create_text_encoder_from_ldm_clip_checkpoint(\n+ config_name, checkpoint, local_files_only=local_files_only\n+ )\n tokenizer = CLIPTokenizer.from_pretrained(config_name, local_files_only=local_files_only)\n \n except Exception:\n@@ -1159,7 +1159,9 @@ def 
create_text_encoders_and_tokenizers_from_ldm(\n try:\n config_name = \"openai/clip-vit-large-patch14\"\n tokenizer = CLIPTokenizer.from_pretrained(config_name, local_files_only=local_files_only)\n- text_encoder = create_text_encoder_from_ldm_clip_checkpoint(config_name, checkpoint, local_files_only=local_files_only)\n+ text_encoder = create_text_encoder_from_ldm_clip_checkpoint(\n+ config_name, checkpoint, local_files_only=local_files_only\n+ )\n \n except Exception:\n raise ValueError(\n", "difficulty": 0, "changed_files": ["src/diffusers/loaders/single_file_utils.py"], "commit_link": "https://github.com/huggingface/diffusers/tree/68ddb2559e616656301858d441a523ebd64a710f"}
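The diffusers failure is the F401 class: torch and safetensors.torch.load_file were imported in single_file_utils.py but never referenced, so the fix deletes both imports and, in the same pass, wraps two calls that ran past the line-length limit. A small sketch of the wrapping half, with a hypothetical placeholder function rather than diffusers' real loader:

def create_text_encoder_from_ldm_clip_checkpoint(config_name, checkpoint, local_files_only=False):
    # placeholder body; the real helper lives in diffusers' single-file loader
    return (config_name, len(checkpoint), local_files_only)

# One long single-line call became a wrapped call in the diff; black and
# ruff-format produce this layout automatically once the line exceeds the
# configured width.
text_encoder = create_text_encoder_from_ldm_clip_checkpoint(
    "openai/clip-vit-large-patch14", {"weight": 1}, local_files_only=False
)
print(text_encoder)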
data/python/6cbb12e.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 27, "repo_owner": "mwaskom", "repo_name": "seaborn", "head_branch": "maint/pandas-2.2.0-compat", "workflow_name": "CI", "workflow_filename": "ci.yaml", "workflow_path": ".github/workflows/ci.yaml", "contributor": "mwaskom", "sha_fail": "6cbb12e47665eda2c687b4431d6ce789e74ea4a4", "sha_success": "a38dfd6e77b107ecc874260e2e30d56b7b8fb85a", "workflow": "name: CI\n\non:\n push:\n branches: [master, v0.*]\n pull_request:\n branches: master\n schedule:\n - cron: '0 6 * * 1,4' # Each Monday and Thursday at 06:00 UTC\n workflow_dispatch:\n\npermissions:\n contents: read\n\nenv:\n NB_KERNEL: python\n MPLBACKEND: Agg\n SEABORN_DATA: ${{ github.workspace }}/seaborn-data\n PYDEVD_DISABLE_FILE_VALIDATION: 1\n\njobs:\n build-docs:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1\n\n - name: Setup Python 3.11\n uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0\n with:\n python-version: \"3.11\"\n\n - name: Install seaborn\n run: |\n pip install --upgrade pip\n pip install .[stats,docs]\n\n - name: Install pandoc\n run: |\n wget https://github.com/jgm/pandoc/releases/download/3.1.11/pandoc-3.1.11-1-amd64.deb\n sudo dpkg -i pandoc-3.1.11-1-amd64.deb\n\n - name: Cache datasets\n run: |\n git clone https://github.com/mwaskom/seaborn-data.git\n ls $SEABORN_DATA\n\n - name: Build docs\n env:\n SPHINXOPTS: -j `nproc`\n run: |\n cd doc\n make -j `nproc` notebooks\n make html\n\n\n run-tests:\n runs-on: ubuntu-latest\n\n strategy:\n matrix:\n python: [\"3.8\", \"3.9\", \"3.10\", \"3.11\", \"3.12\"]\n install: [full]\n deps: [latest]\n\n include:\n - python: \"3.8\"\n install: full\n deps: pinned\n - python: \"3.11\"\n install: light\n deps: latest\n\n steps:\n - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1\n\n - name: Setup Python ${{ matrix.python }}\n uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0\n with:\n python-version: ${{ matrix.python }}\n allow-prereleases: true\n\n - name: Install seaborn\n run: |\n pip install --upgrade pip wheel\n if [[ ${{matrix.install}} == 'full' ]]; then EXTRAS=',stats'; fi\n if [[ ${{matrix.deps }} == 'pinned' ]]; then DEPS='-r ci/deps_pinned.txt'; fi\n pip install .[dev$EXTRAS] $DEPS\n\n - name: Run tests\n run: make test\n\n - name: Upload coverage\n uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # v3.1.4\n if: ${{ success() }}\n\n lint:\n runs-on: ubuntu-latest\n strategy:\n fail-fast: false\n steps:\n\n - name: Checkout\n uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1\n\n - name: Setup Python\n uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0\n\n - name: Install tools\n run: pip install mypy flake8\n\n - name: Flake8\n run: make lint\n\n - name: Type checking\n run: make typecheck\n", "logs": [{"step_name": "run-tests (3.8, full, pinned)/5_Run tests.txt", "log": "##[group]Run make test\n\u001b[36;1mmake test\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n NB_KERNEL: python\n MPLBACKEND: Agg\n SEABORN_DATA: /home/runner/work/seaborn/seaborn/seaborn-data\n PYDEVD_DISABLE_FILE_VALIDATION: 1\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: 
/opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\npytest -n auto --cov=seaborn --cov=tests --cov-config=setup.cfg tests\n============================= test session starts ==============================\nplatform linux -- Python 3.8.18, pytest-7.4.4, pluggy-1.3.0\nrootdir: /home/runner/work/seaborn/seaborn\nconfigfile: pyproject.toml\nplugins: xdist-3.5.0, cov-4.1.0\ncreated: 4/4 workers\n4 workers [2380 items]\n\n........................................................................ [ 3%]\n........................................................................ [ 6%]\n........................................................................ [ 9%]\n........................................................................ [ 12%]\n........................................................................ [ 15%]\n........................................................................ [ 18%]\n........................................................................ [ 21%]\n........................................................................ [ 24%]\n.........................................................s.............. [ 27%]\nF...........................................................sss......... [ 30%]\n........................................................................ [ 33%]\n........................s............................................... [ 36%]\n......................................................................... [ 39%]\n................................ss...................................... [ 42%]\n........................................................................ [ 45%]\n.....................................................s.s................ [ 48%]\n.....sss................................................................ [ 51%]\n........................................................................ [ 54%]\ns.............ss........................................................ [ 57%]\n............................................................s........... [ 60%]\n...ss.................................................................s. [ 63%]\n..........x..................................................x.......... [ 66%]\n.x................................x...x................................. [ 69%]\n...................................................ss................ss. [ 72%]\n........................................................................ [ 75%]\n........................................................................ [ 78%]\n........................................................................ [ 81%]\n......................................................................... [ 84%]\n..................x..................................................... [ 87%]\n........................................................................ [ 90%]\n........................................................................ [ 93%]\n........................................................................ [ 96%]\n........................................................................ [ 99%]\n.. 
[100%]\n=================================== FAILURES ===================================\n_________________ TestBarPlot.test_datetime_native_scale_axis __________________\n[gw3] linux -- Python 3.8.18 /opt/hostedtoolcache/Python/3.8.18/x64/bin/python\n\n> ???\nE KeyError: 'ME'\n\npandas/_libs/tslibs/offsets.pyx:3502: KeyError\n\nThe above exception was the direct cause of the following exception:\n\n> ???\n\npandas/_libs/tslibs/offsets.pyx:3604: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\n> ???\nE ValueError: Invalid frequency: ME\n\npandas/_libs/tslibs/offsets.pyx:3508: ValueError\n\nThe above exception was the direct cause of the following exception:\n\nself = <tests.test_categorical.TestBarPlot object at 0x7f2ce1dfd6d0>\n\n def test_datetime_native_scale_axis(self):\n \n> x = pd.date_range(\"2010-01-01\", periods=20, freq=\"ME\")\n\ntests/test_categorical.py:2081: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pandas/core/indexes/datetimes.py:1069: in date_range\n dtarr = DatetimeArray._generate_range(\n/opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pandas/core/arrays/datetimes.py:377: in _generate_range\n freq = to_offset(freq)\npandas/_libs/tslibs/offsets.pyx:3515: in pandas._libs.tslibs.offsets.to_offset\n ???\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\n> ???\nE ValueError: Invalid frequency: ME\n\npandas/_libs/tslibs/offsets.pyx:3612: ValueError\n=============================== warnings summary ===============================\n../../../../../opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/_collections_abc.py:832: 4 warnings\ntests/test_rcmod.py: 23 warnings\ntests/_core/test_plot.py: 392 warnings\ntests/_marks/test_bar.py: 34 warnings\ntests/_marks/test_area.py: 12 warnings\ntests/_marks/test_dot.py: 28 warnings\ntests/_marks/test_line.py: 66 warnings\ntests/_marks/test_text.py: 16 warnings\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/_collections_abc.py:832: MatplotlibDeprecationWarning: \n The savefig.jpeg_quality rcparam was deprecated in Matplotlib 3.3 and will be removed two minor releases later.\n self[key] = other[key]\n\ntests/test_rcmod.py: 23 warnings\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/_collections_abc.py:832: MatplotlibDeprecationWarning: \n The animation.avconv_args rcparam was deprecated in Matplotlib 3.3 and will be removed two minor releases later.\n self[key] = other[key]\n\ntests/test_rcmod.py: 23 warnings\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/_collections_abc.py:832: MatplotlibDeprecationWarning: \n The animation.avconv_path rcparam was deprecated in Matplotlib 3.3 and will be removed two minor releases later.\n self[key] = other[key]\n\ntests/test_rcmod.py: 23 warnings\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/_collections_abc.py:832: MatplotlibDeprecationWarning: \n The animation.html_args rcparam was deprecated in Matplotlib 3.3 and will be removed two minor releases later.\n self[key] = other[key]\n\ntests/test_rcmod.py: 23 warnings\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/_collections_abc.py:832: MatplotlibDeprecationWarning: \n The keymap.all_axes rcparam was deprecated in Matplotlib 3.3 and will be removed two minor releases later.\n self[key] = other[key]\n\ntests/test_rcmod.py: 23 warnings\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/_collections_abc.py:832: 
MatplotlibDeprecationWarning: \n The text.latex.preview rcparam was deprecated in Matplotlib 3.3 and will be removed two minor releases later.\n self[key] = other[key]\n\ntests/_core/test_plot.py::TestFacetInterface::test_layout_algo[constrained]\n /home/runner/work/seaborn/seaborn/seaborn/_core/plot.py:1002: UserWarning: There are no gridspecs with layoutgrids. Possibly did not call parent GridSpec with the \"figure\" keyword\n self._figure.savefig(loc, **kwargs)\n\ntests/_core/test_scales.py::TestNominal::test_empty_data\n /home/runner/work/seaborn/seaborn/seaborn/_core/rules.py:170: DeprecationWarning: The default dtype for empty Series will be 'object' instead of 'float64' in a future version. Specify a dtype explicitly to silence this warning.\n if variable_type(pd.Series(order)) == \"numeric\":\n\n-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html\n\n---------- coverage: platform linux, python 3.8.18-final-0 -----------\nName Stmts Miss Cover\n------------------------------------------------------\nseaborn/__init__.py 16 0 100%\nseaborn/_base.py 762 19 98%\nseaborn/_compat.py 76 36 53%\nseaborn/_core/__init__.py 0 0 100%\nseaborn/_core/data.py 116 9 92%\nseaborn/_core/exceptions.py 10 1 90%\nseaborn/_core/groupby.py 54 0 100%\nseaborn/_core/moves.py 109 0 100%\nseaborn/_core/plot.py 827 11 99%\nseaborn/_core/properties.py 422 2 99%\nseaborn/_core/rules.py 63 3 95%\nseaborn/_core/scales.py 502 47 91%\nseaborn/_core/subplots.py 140 0 100%\nseaborn/_core/typing.py 25 1 96%\nseaborn/_docstrings.py 40 5 88%\nseaborn/_marks/__init__.py 0 0 100%\nseaborn/_marks/area.py 86 3 97%\nseaborn/_marks/bar.py 123 0 100%\nseaborn/_marks/base.py 132 2 98%\nseaborn/_marks/dot.py 92 0 100%\nseaborn/_marks/line.py 116 0 100%\nseaborn/_marks/text.py 33 0 100%\nseaborn/_statistics.py 325 5 98%\nseaborn/_stats/__init__.py 0 0 100%\nseaborn/_stats/aggregation.py 41 1 98%\nseaborn/_stats/base.py 26 1 96%\nseaborn/_stats/counting.py 96 0 100%\nseaborn/_stats/density.py 99 4 96%\nseaborn/_stats/order.py 30 3 90%\nseaborn/_stats/regression.py 23 1 96%\nseaborn/_testing.py 52 4 92%\nseaborn/algorithms.py 60 0 100%\nseaborn/axisgrid.py 1039 34 97%\nseaborn/categorical.py 1208 13 99%\nseaborn/distributions.py 952 34 96%\nseaborn/matrix.py 551 26 95%\nseaborn/miscplot.py 24 0 100%\nseaborn/objects.py 15 0 100%\nseaborn/palettes.py 249 1 99%\nseaborn/rcmod.py 104 0 100%\nseaborn/regression.py 336 11 97%\nseaborn/relational.py 287 1 99%\nseaborn/utils.py 375 14 96%\ntests/__init__.py 0 0 100%\ntests/_core/__init__.py 0 0 100%\ntests/_core/test_data.py 293 10 97%\ntests/_core/test_groupby.py 83 0 100%\ntests/_core/test_moves.py 231 0 100%\ntests/_core/test_plot.py 1556 39 97%\ntests/_core/test_properties.py 375 4 99%\ntests/_core/test_rules.py 72 0 100%\ntests/_core/test_scales.py 547 0 100%\ntests/_core/test_subplots.py 368 0 100%\ntests/_marks/__init__.py 0 0 100%\ntests/_marks/test_area.py 84 0 100%\ntests/_marks/test_bar.py 152 0 100%\ntests/_marks/test_base.py 102 0 100%\ntests/_marks/test_dot.py 136 0 100%\ntests/_marks/test_line.py 298 0 100%\ntests/_marks/test_text.py 98 1 99%\ntests/_stats/__init__.py 0 0 100%\ntests/_stats/test_aggregation.py 84 0 100%\ntests/_stats/test_counting.py 180 0 100%\ntests/_stats/test_density.py 142 3 98%\ntests/_stats/test_order.py 64 1 98%\ntests/_stats/test_regression.py 36 0 100%\ntests/conftest.py 107 1 99%\ntests/test_algorithms.py 110 0 100%\ntests/test_axisgrid.py 1314 25 98%\ntests/test_base.py 1008 3 99%\ntests/test_categorical.py 2169 48 
98%\ntests/test_distributions.py 1526 12 99%\ntests/test_docstrings.py 19 0 100%\ntests/test_matrix.py 861 28 97%\ntests/test_miscplot.py 24 0 100%\ntests/test_objects.py 11 0 100%\ntests/test_palettes.py 304 0 100%\ntests/test_rcmod.py 189 21 89%\ntests/test_regression.py 441 6 99%\ntests/test_relational.py 1213 13 99%\ntests/test_statistics.py 489 2 99%\ntests/test_utils.py 382 4 99%\n------------------------------------------------------\nTOTAL 24704 513 98%\n\n=========================== short test summary info ============================\nFAILED tests/test_categorical.py::TestBarPlot::test_datetime_native_scale_axis - ValueError: Invalid frequency: ME\n= 1 failed, 2350 passed, 23 skipped, 6 xfailed, 692 warnings in 276.25s (0:04:36) =\nmake: *** [Makefile:4: test] Error 1\n##[error]Process completed with exit code 2.\n"}], "diff": "diff --git a/tests/test_categorical.py b/tests/test_categorical.py\nindex 8e5fd41d..3df78247 100644\n--- a/tests/test_categorical.py\n+++ b/tests/test_categorical.py\n@@ -2078,7 +2078,7 @@ class TestBarPlot(SharedAggTests):\n \n def test_datetime_native_scale_axis(self):\n \n- x = pd.date_range(\"2010-01-01\", periods=20, freq=\"ME\")\n+ x = pd.date_range(\"2010-01-01\", periods=20, freq=\"MS\")\n y = np.arange(20)\n ax = barplot(x=x, y=y, native_scale=True)\n assert \"Date\" in ax.xaxis.get_major_locator().__class__.__name__\n", "difficulty": 2, "changed_files": ["tests/test_categorical.py"], "commit_link": "https://github.com/mwaskom/seaborn/tree/6cbb12e47665eda2c687b4431d6ce789e74ea4a4"}
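The seaborn datapoint is a pandas compatibility fix rather than a lint fix: pandas 2.2 renamed the month-end offset alias from "M" to "ME", so a test written against 2.2 fails on the pinned pandas in the Python 3.8 job with "ValueError: Invalid frequency: ME". The one-character cure in the diff is "MS" (month start), which old and new pandas both accept. A runnable check, assuming any reasonably recent pandas:

import pandas as pd

# "MS" (month start) is valid across pandas 1.x and 2.x; "ME" only exists in
# pandas >= 2.2, which is why the pinned-dependency CI job above failed.
x = pd.date_range("2010-01-01", periods=20, freq="MS")
print(x[0], x[-1], len(x))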
data/python/76777e3.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 35, "repo_owner": "aws", "repo_name": "serverless-application-model", "head_branch": "layer-version-fix", "workflow_name": "Tests", "workflow_filename": "build.yml", "workflow_path": ".github/workflows/build.yml", "contributor": "aws", "sha_fail": "76777e3ab9a15fe11ef4219804cf9c09c04fcf13", "sha_success": "1b6c3f8087a787c8c2bcb0dc201f11a2c4694428", "workflow": "name: Tests\n\non:\n push:\n branches:\n - develop\n - \"feat-*\"\n pull_request:\n workflow_dispatch:\n merge_group:\n types: [checks_requested]\n\njobs:\n build:\n name: ${{ matrix.os }} / ${{ matrix.python }}\n runs-on: ${{ matrix.os }}\n strategy:\n fail-fast: false\n matrix:\n os:\n - ubuntu-latest\n python:\n - \"3.7\"\n - \"3.8\"\n - \"3.9\"\n - \"3.10\"\n - \"3.11\"\n steps:\n - uses: actions/checkout@v3\n - uses: actions/setup-python@v4\n with:\n python-version: ${{ matrix.python }}\n - run: make init\n - run: make pr\n", "logs": [{"step_name": "ubuntu-latest 3.7/5_Run make pr.txt", "log": "##[group]Run make pr\n\u001b[36;1mmake pr\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.7.17/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.7.17/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.7.17/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.7.17/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.7.17/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.7.17/x64/lib\n##[endgroup]\n# Checking latest schema was generated (run `make schema` if this fails)\nmkdir -p .tmp\npython -m samtranslator.internal.schema_source.schema --sam-schema .tmp/sam.schema.json --cfn-schema schema_source/cloudformation.schema.json --unified-schema .tmp/schema.json\ndiff -u schema_source/sam.schema.json .tmp/sam.schema.json\ndiff -u samtranslator/schema/schema.json .tmp/schema.json\nblack --check setup.py samtranslator tests integration bin schema_source\nwould reformat /home/runner/work/serverless-application-model/serverless-application-model/samtranslator/utils/actions.py\nwould reformat /home/runner/work/serverless-application-model/serverless-application-model/samtranslator/utils/traverse.py\n\nOh no! 
\ud83d\udca5 \ud83d\udc94 \ud83d\udca5\n2 files would be reformatted, 363 files would be left unchanged.\nmake: *** [Makefile:40: format-check] Error 1\n##[error]Process completed with exit code 2.\n"}, {"step_name": "ubuntu-latest 3.8/5_Run make pr.txt", "log": "##[group]Run make pr\n\u001b[36;1mmake pr\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\n# Checking latest schema was generated (run `make schema` if this fails)\nmkdir -p .tmp\npython -m samtranslator.internal.schema_source.schema --sam-schema .tmp/sam.schema.json --cfn-schema schema_source/cloudformation.schema.json --unified-schema .tmp/schema.json\ndiff -u schema_source/sam.schema.json .tmp/sam.schema.json\ndiff -u samtranslator/schema/schema.json .tmp/schema.json\nblack --check setup.py samtranslator tests integration bin schema_source\nwould reformat /home/runner/work/serverless-application-model/serverless-application-model/samtranslator/utils/actions.py\nwould reformat /home/runner/work/serverless-application-model/serverless-application-model/samtranslator/utils/traverse.py\n\nOh no! \ud83d\udca5 \ud83d\udc94 \ud83d\udca5\n2 files would be reformatted, 363 files would be left unchanged.\nmake: *** [Makefile:40: format-check] Error 1\n##[error]Process completed with exit code 2.\n"}, {"step_name": "ubuntu-latest 3.9/5_Run make pr.txt", "log": "##[group]Run make pr\n\u001b[36;1mmake pr\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\n# Checking latest schema was generated (run `make schema` if this fails)\nmkdir -p .tmp\npython -m samtranslator.internal.schema_source.schema --sam-schema .tmp/sam.schema.json --cfn-schema schema_source/cloudformation.schema.json --unified-schema .tmp/schema.json\ndiff -u schema_source/sam.schema.json .tmp/sam.schema.json\ndiff -u samtranslator/schema/schema.json .tmp/schema.json\nblack --check setup.py samtranslator tests integration bin schema_source\nwould reformat /home/runner/work/serverless-application-model/serverless-application-model/samtranslator/utils/actions.py\nwould reformat /home/runner/work/serverless-application-model/serverless-application-model/samtranslator/utils/traverse.py\n\nOh no! 
\ud83d\udca5 \ud83d\udc94 \ud83d\udca5\n2 files would be reformatted, 363 files would be left unchanged.\nmake: *** [Makefile:40: format-check] Error 1\n##[error]Process completed with exit code 2.\n"}, {"step_name": "ubuntu-latest 3.10/5_Run make pr.txt", "log": "##[group]Run make pr\n\u001b[36;1mmake pr\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.10.13/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib\n##[endgroup]\n# Checking latest schema was generated (run `make schema` if this fails)\nmkdir -p .tmp\npython -m samtranslator.internal.schema_source.schema --sam-schema .tmp/sam.schema.json --cfn-schema schema_source/cloudformation.schema.json --unified-schema .tmp/schema.json\ndiff -u schema_source/sam.schema.json .tmp/sam.schema.json\ndiff -u samtranslator/schema/schema.json .tmp/schema.json\nblack --check setup.py samtranslator tests integration bin schema_source\nwould reformat /home/runner/work/serverless-application-model/serverless-application-model/samtranslator/utils/actions.py\nwould reformat /home/runner/work/serverless-application-model/serverless-application-model/samtranslator/utils/traverse.py\n\nOh no! \ud83d\udca5 \ud83d\udc94 \ud83d\udca5\n2 files would be reformatted, 363 files would be left unchanged.\nmake: *** [Makefile:40: format-check] Error 1\n##[error]Process completed with exit code 2.\n"}, {"step_name": "ubuntu-latest 3.11/5_Run make pr.txt", "log": "##[group]Run make pr\n\u001b[36;1mmake pr\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.11.6/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.11.6/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.6/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.6/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.6/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.11.6/x64/lib\n##[endgroup]\n# Checking latest schema was generated (run `make schema` if this fails)\nmkdir -p .tmp\npython -m samtranslator.internal.schema_source.schema --sam-schema .tmp/sam.schema.json --cfn-schema schema_source/cloudformation.schema.json --unified-schema .tmp/schema.json\ndiff -u schema_source/sam.schema.json .tmp/sam.schema.json\ndiff -u samtranslator/schema/schema.json .tmp/schema.json\nblack --check setup.py samtranslator tests integration bin schema_source\nwould reformat /home/runner/work/serverless-application-model/serverless-application-model/samtranslator/utils/actions.py\nwould reformat /home/runner/work/serverless-application-model/serverless-application-model/samtranslator/utils/traverse.py\n\nOh no! \ud83d\udca5 \ud83d\udc94 \ud83d\udca5\n2 files would be reformatted, 363 files would be left unchanged.\nmake: *** [Makefile:40: format-check] Error 1\n##[error]Process completed with exit code 2.\n"}], "diff": "diff --git a/samtranslator/utils/actions.py b/samtranslator/utils/actions.py\nindex 10c1fd7b..921914a0 100644\n--- a/samtranslator/utils/actions.py\n+++ b/samtranslator/utils/actions.py\n@@ -15,7 +15,7 @@ class Action(ABC):\n \n class ResolveDependsOn(Action):\n DependsOn = \"DependsOn\"\n- \n+\n def __init__(self, resolution_data: Dict[str, str]):\n \"\"\"\n Initializes ResolveDependsOn. 
Where data necessary to resolve execute can be provided.\ndiff --git a/samtranslator/utils/traverse.py b/samtranslator/utils/traverse.py\nindex 3ab9e1c5..ea5ec5e7 100644\n--- a/samtranslator/utils/traverse.py\n+++ b/samtranslator/utils/traverse.py\n@@ -19,7 +19,7 @@ def traverse(\n \n :param input_value: Any primitive type (dict, array, string etc) whose value might contain a changed value\n :param actions: Method that will be called to actually resolve the function.\n- :return: Modified `input` with values resolved \n+ :return: Modified `input` with values resolved\n \"\"\"\n \n for action in actions:\n", "difficulty": 0, "changed_files": ["samtranslator/utils/actions.py", "samtranslator/utils/traverse.py"], "commit_link": "https://github.com/aws/serverless-application-model/tree/76777e3ab9a15fe11ef4219804cf9c09c04fcf13"}
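The serverless-application-model failure is black finding trailing whitespace: a whitespace-only line inside ResolveDependsOn and a trailing space at the end of a docstring line, visible as the two whitespace-stripping hunks in the diff above. A quick way to reproduce this class of failure from Python, assuming black is installed (format_str is black's documented library entry point):

import black

# A whitespace-only line inside a class body, like the one fixed in
# samtranslator/utils/actions.py; black's output never carries trailing
# whitespace, so `black --check` fails until the line is cleaned.
src = 'class ResolveDependsOn:\n    DependsOn = "DependsOn"\n    \n    def run(self) -> None:\n        pass\n'
formatted = black.format_str(src, mode=black.Mode())
assert formatted != src  # the blank line's trailing spaces were stripped
print(repr(formatted))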
data/python/76e35ec.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 13, "repo_owner": "hugsy", "repo_name": "gef", "head_branch": "use_info_proc_mapping", "workflow_name": "CI Test for GEF", "workflow_filename": "run-tests.yml", "workflow_path": ".github/workflows/run-tests.yml", "contributor": "hugsy", "sha_fail": "76e35eca93562514943c5842cf2b0b8ec94a4763", "sha_success": "d8b84e55be36327b0255559783551e9563ebf710", "workflow": "name: CI Test for GEF\n\non:\n push:\n branches:\n - main\n\n pull_request:\n branches:\n - main\n\njobs:\n build:\n strategy:\n fail-fast: false\n matrix:\n os:\n - ubuntu-22.04\n - ubuntu-20.04\n name: \"Run Unit tests on ${{ matrix.os }}\"\n runs-on: ${{ matrix.os }}\n defaults:\n run:\n shell: bash\n\n steps:\n - uses: actions/checkout@v3\n\n - name: Install python and toolchain\n run: |\n sudo apt-get update\n sudo apt-get install -y gdb-multiarch python3-dev python3-pip python3-wheel python3-setuptools git cmake gcc g++ pkg-config libglib2.0-dev gdbserver qemu-user\n sudo python3 -m pip install --upgrade pip\n\n - name: Set runtime environment variables\n run: |\n echo PY_VER=`gdb -q -nx -ex \"pi print('.'.join(map(str, sys.version_info[:2])))\" -ex quit` >> $GITHUB_ENV\n echo GEF_CI_NB_CPU=`grep -c ^processor /proc/cpuinfo` >> $GITHUB_ENV\n echo GEF_CI_ARCH=`uname --processor` >> $GITHUB_ENV\n echo GEF_CI_CACHE_DIR=`python3 -m pip cache dir` >> $GITHUB_ENV\n\n - name: Cache dependencies\n uses: actions/cache@v3\n id: cache-deps\n env:\n cache-name: cache-deps\n with:\n key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}\n path: ${{ env.GEF_CI_CACHE_DIR }}\n restore-keys:\n ${{ runner.os }}-pip-${{ env.cache-name }}-\n ${{ runner.os }}-pip-\n ${{ runner.os }}-${{ env.cache-name }}-\n ${{ runner.os }}-\n\n - name: Install Python Requirements\n run: |\n mkdir -p ${{ env.GEF_CI_CACHE_DIR }}\n python${{ env.PY_VER }} -m pip install --user --upgrade -r tests/requirements.txt\n\n - name: Setup GEF\n run: |\n echo \"source $(pwd)/gef.py\" > ~/.gdbinit\n gdb -q -ex 'gef missing' -ex 'gef help' -ex 'gef config' -ex start -ex continue -ex quit /bin/pwd\n\n - name: Run Tests\n run: |\n make -C tests/binaries -j ${{ env.GEF_CI_NB_CPU }}\n python${{ env.PY_VER }} -m pytest --forked -n ${{ env.GEF_CI_NB_CPU }} -v -k \"not benchmark\" tests/\n", "logs": [{"step_name": "Run Unit tests on ubuntu-22.04/8_Run Tests.txt", "log": "##[group]Run make -C tests/binaries -j 4\n\u001b[36;1mmake -C tests/binaries -j 4\u001b[0m\n\u001b[36;1mpython3.10 -m pytest --forked -n 4 -v -k \"not benchmark\" tests/\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n PY_VER: 3.10\n GEF_CI_NB_CPU: 4\n GEF_CI_ARCH: x86_64\n GEF_CI_CACHE_DIR: /home/runner/.cache/pip\n##[endgroup]\nmake: Entering directory '/home/runner/work/gef/gef/tests/binaries'\n[+] Building '/tmp/bss.out'\n[+] Building '/tmp/canary.out'\n[+] Building '/tmp/checksec-no-canary.out'\n[+] Building '/tmp/checksec-no-nx.out'\n[+] Building '/tmp/checksec-no-pie.out'\n[+] Building '/tmp/default.out'\n[+] Building '/tmp/format-string-helper.out'\n[+] Building '/tmp/heap-analysis.out'\n[+] Building '/tmp/heap-bins.out'\n[+] Building '/tmp/heap-fastbins.out'\n[+] Building '/tmp/heap-multiple-heaps.out'\n[+] Building '/tmp/heap-non-main.out'\n[+] Building '/tmp/heap-tcache.out'\n[+] Building '/tmp/heap.out'\n[+] Building '/tmp/memwatch.out'\n[+] Building '/tmp/mmap-known-address.out'\n[+] Building '/tmp/nested.out'\n[+] Building '/tmp/nested2.out'\n[+] Building '/tmp/pattern.out'\n[+] Building '/tmp/pcustom.out'\n[+] Building 
'/tmp/class.out'\nmake: Leaving directory '/home/runner/work/gef/gef/tests/binaries'\n/home/runner/.local/lib/python3.10/site-packages/pytest_benchmark/logger.py:46: PytestBenchmarkWarning: Benchmarks are automatically disabled because xdist plugin is active.Benchmarks cannot be performed reliably in a parallelized environment.\n warner(PytestBenchmarkWarning(text))\n============================= test session starts ==============================\nplatform linux -- Python 3.10.12, pytest-7.4.4, pluggy-1.3.0 -- /usr/bin/python3.10\ncachedir: .pytest_cache\nbenchmark: 4.0.0 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)\nrootdir: /home/runner/work/gef/gef/tests\nconfigfile: pytest.ini\nplugins: benchmark-4.0.0, forked-1.6.0, xdist-3.5.0, cov-4.1.0\ncreated: 4/4 workers\n4 workers [168 items]\n\nscheduling tests via LoadScheduling\n\ntests/api/deprecated.py::GefFuncDeprecatedApi::test_deprecated_elf_values \ntests/api/gef_heap.py::GefHeapApi::test_func_gef_heap_tidx2size \ntests/commands/aliases.py::AliasesCommand::test_cmd_aliases_rm \ntests/api/gef_session.py::GefSessionApi::test_func_get_pid \n[gw0] [ 0%] PASSED tests/api/deprecated.py::GefFuncDeprecatedApi::test_deprecated_elf_values \ntests/api/deprecated.py::GefFuncDeprecatedApi::test_deprecated_gef_attributes \n[gw3] [ 1%] PASSED tests/commands/aliases.py::AliasesCommand::test_cmd_aliases_rm \ntests/commands/aslr.py::AslrCommand::test_cmd_aslr_show \n[gw2] [ 1%] PASSED tests/api/gef_session.py::GefSessionApi::test_func_get_pid \ntests/api/gef_session.py::GefSessionApi::test_root_dir_local \n[gw1] [ 2%] PASSED tests/api/gef_heap.py::GefHeapApi::test_func_gef_heap_tidx2size \ntests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_only_running \n[gw0] [ 2%] PASSED tests/api/deprecated.py::GefFuncDeprecatedApi::test_deprecated_gef_attributes \ntests/api/gef_arch.py::GefArchApi::test_api_gef_arch_ptrsize \n[gw3] [ 3%] PASSED tests/commands/aslr.py::AslrCommand::test_cmd_aslr_show \ntests/commands/aslr.py::AslrCommand::test_cmd_aslr_toggle \n[gw1] [ 4%] PASSED tests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_only_running \ntests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_parse_info_proc_maps \n[gw2] [ 4%] PASSED tests/api/gef_session.py::GefSessionApi::test_root_dir_local \ntests/api/gef_session.py::GefSessionApi::test_root_dir_qemu \n[gw0] [ 5%] PASSED tests/api/gef_arch.py::GefArchApi::test_api_gef_arch_ptrsize \ntests/api/gef_arch.py::GefArchApi::test_api_gef_arch_x86 \n[gw0] [ 5%] SKIPPED tests/api/gef_arch.py::GefArchApi::test_api_gef_arch_x86 \ntests/api/gef_arch.py::GefArchApi::test_api_gef_arch_x86_64 \n[gw3] [ 6%] PASSED tests/commands/aslr.py::AslrCommand::test_cmd_aslr_toggle \ntests/commands/canary.py::CanaryCommand::test_cmd_canary \n[gw1] [ 7%] FAILED tests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_parse_info_proc_maps \ntests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_parse_info_proc_maps_expected_format \n[gw2] [ 7%] PASSED tests/api/gef_session.py::GefSessionApi::test_root_dir_qemu \ntests/api/gef_session.py::GefSessionApi::test_root_dir_remote \n[gw3] [ 8%] PASSED tests/commands/canary.py::CanaryCommand::test_cmd_canary \ntests/commands/canary.py::CanaryCommand::test_overwrite_canary \n[gw0] [ 8%] PASSED tests/api/gef_arch.py::GefArchApi::test_api_gef_arch_x86_64 \ntests/api/gef_disassemble.py::GefDisassembleApiFunction::test_func_gef_disassemble \n[gw1] [ 9%] FAILED 
tests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_parse_info_proc_maps_expected_format \ntests/api/gef_memory.py::GefMemoryApi::test_func_parse_maps_local_procfs \n[gw0] [ 10%] PASSED tests/api/gef_disassemble.py::GefDisassembleApiFunction::test_func_gef_disassemble \ntests/api/gef_disassemble.py::GefDisassembleApiFunction::test_func_gef_disassemble_page_border \n[gw3] [ 10%] PASSED tests/commands/canary.py::CanaryCommand::test_overwrite_canary \ntests/commands/checksec.py::ChecksecCommandNoCanary::test_cmd_checksec \n[gw2] [ 11%] PASSED tests/api/gef_session.py::GefSessionApi::test_root_dir_remote \ntests/api/misc.py::MiscFunctionTest::test_func_gef_convenience \n[gw1] [ 11%] PASSED tests/api/gef_memory.py::GefMemoryApi::test_func_parse_maps_local_procfs \ntests/api/gef_memory.py::GefMemoryApi::test_func_parse_maps_remote_gdbserver \n[gw3] [ 12%] PASSED tests/commands/checksec.py::ChecksecCommandNoCanary::test_cmd_checksec \ntests/commands/checksec.py::ChecksecCommandNoNx::test_cmd_checksec \n[gw0] [ 13%] PASSED tests/api/gef_disassemble.py::GefDisassembleApiFunction::test_func_gef_disassemble_page_border \ntests/api/gef_heap.py::GefHeapApi::test_class_glibcarena_main_arena \n[gw2] [ 13%] PASSED tests/api/misc.py::MiscFunctionTest::test_func_gef_convenience \ntests/api/misc.py::MiscFunctionTest::test_func_parse_address \n[gw3] [ 14%] PASSED tests/commands/checksec.py::ChecksecCommandNoNx::test_cmd_checksec \ntests/commands/checksec.py::ChecksecCommandNoPie::test_cmd_checksec \n[gw0] [ 14%] PASSED tests/api/gef_heap.py::GefHeapApi::test_class_glibcarena_main_arena \ntests/api/gef_heap.py::GefHeapApi::test_func_gef_heap_csize2tidx \n[gw2] [ 15%] PASSED tests/api/misc.py::MiscFunctionTest::test_func_parse_address \ntests/api/misc.py::MiscFunctionTest::test_func_show_last_exception \n[gw3] [ 16%] PASSED tests/commands/checksec.py::ChecksecCommandNoPie::test_cmd_checksec \ntests/commands/dereference.py::DereferenceCommand::test_cmd_dereference \n[gw0] [ 16%] PASSED tests/api/gef_heap.py::GefHeapApi::test_func_gef_heap_csize2tidx \ntests/api/gef_heap.py::GefHeapApi::test_func_gef_heap_malloc_align_address \n[gw2] [ 17%] PASSED tests/api/misc.py::MiscFunctionTest::test_func_show_last_exception \ntests/api/misc.py::MiscFunctionTest::test_func_which \n[gw3] [ 17%] PASSED tests/commands/dereference.py::DereferenceCommand::test_cmd_dereference \ntests/commands/dereference.py::DereferenceCommand::test_cmd_dereference_backwards \n[gw0] [ 18%] PASSED tests/api/gef_heap.py::GefHeapApi::test_func_gef_heap_malloc_align_address \ntests/commands/dereference.py::DereferenceCommand::test_cmd_dereference_forwards \n[gw2] [ 19%] PASSED tests/api/misc.py::MiscFunctionTest::test_func_which \ntests/commands/aliases.py::AliasesCommand::test_cmd_aliases_add \n[gw3] [ 19%] PASSED tests/commands/dereference.py::DereferenceCommand::test_cmd_dereference_backwards \ntests/commands/gef.py::GefCommand::test_cmd_gef_config_get \n[gw2] [ 20%] PASSED tests/commands/aliases.py::AliasesCommand::test_cmd_aliases_add \ntests/commands/aliases.py::AliasesCommand::test_cmd_aliases_list \n[gw0] [ 20%] PASSED tests/commands/dereference.py::DereferenceCommand::test_cmd_dereference_forwards \ntests/commands/edit_flags.py::EditFlagsCommand::test_cmd_edit_flags_disable \n[gw3] [ 21%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_config_get \ntests/commands/gef.py::GefCommand::test_cmd_gef_config_set \n[gw2] [ 22%] PASSED tests/commands/aliases.py::AliasesCommand::test_cmd_aliases_list 
\ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunks \n[gw0] [ 22%] PASSED tests/commands/edit_flags.py::EditFlagsCommand::test_cmd_edit_flags_disable \ntests/commands/edit_flags.py::EditFlagsCommand::test_cmd_edit_flags_enable \n[gw3] [ 23%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_config_set \ntests/commands/gef.py::GefCommand::test_cmd_gef_help \n[gw2] [ 23%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunks \ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_max_size_filter \n[gw3] [ 24%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_help \ntests/commands/gef.py::GefCommand::test_cmd_gef_install \n[gw0] [ 25%] PASSED tests/commands/edit_flags.py::EditFlagsCommand::test_cmd_edit_flags_enable \ntests/commands/edit_flags.py::EditFlagsCommand::test_cmd_edit_flags_toggle \n[gw3] [ 25%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_install \ntests/commands/gef.py::GefCommand::test_cmd_gef_run_and_run \n[gw2] [ 26%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_max_size_filter \ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_min_size_filter \n[gw0] [ 26%] PASSED tests/commands/edit_flags.py::EditFlagsCommand::test_cmd_edit_flags_toggle \ntests/commands/elf_info.py::ElfInfoCommand::test_cmd_elf_info \n[gw3] [ 27%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_run_and_run \ntests/commands/gef.py::GefCommand::test_cmd_gef_save \n[gw0] [ 27%] PASSED tests/commands/elf_info.py::ElfInfoCommand::test_cmd_elf_info \ntests/commands/entry_break.py::EntryBreakCommand::test_cmd_entry_break \n[gw2] [ 28%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_min_size_filter \ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_summary \n[gw3] [ 29%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_save \ntests/commands/gef_remote.py::GefRemoteCommand::test_cmd_gef_remote \n[gw0] [ 29%] PASSED tests/commands/entry_break.py::EntryBreakCommand::test_cmd_entry_break \ntests/commands/format_string_helper.py::FormatStringHelperCommand::test_cmd_format_string_helper \n[gw2] [ 30%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_summary \ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_with_count \n[gw1] [ 30%] PASSED tests/api/gef_memory.py::GefMemoryApi::test_func_parse_maps_remote_gdbserver \ntests/api/gef_memory.py::GefMemoryApi::test_func_parse_maps_remote_qemu \n[gw0] [ 31%] PASSED tests/commands/format_string_helper.py::FormatStringHelperCommand::test_cmd_format_string_helper \ntests/commands/functions.py::FunctionsCommand::test_cmd_functions \n[gw3] [ 32%] PASSED tests/commands/gef_remote.py::GefRemoteCommand::test_cmd_gef_remote \ntests/commands/gef_remote.py::GefRemoteCommand::test_cmd_gef_remote_qemu_user \n[gw2] [ 32%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_with_count \ntests/commands/heap.py::HeapCommand::test_cmd_heap_set_arena \n[gw0] [ 33%] PASSED tests/commands/functions.py::FunctionsCommand::test_cmd_functions \ntests/commands/gef.py::GefCommand::test_cmd_gef \n[gw1] [ 33%] PASSED tests/api/gef_memory.py::GefMemoryApi::test_func_parse_maps_remote_qemu \ntests/api/gef_memory.py::GefMemoryApi::test_func_parse_permissions \n[gw3] [ 34%] PASSED tests/commands/gef_remote.py::GefRemoteCommand::test_cmd_gef_remote_qemu_user \ntests/commands/gef_remote.py::GefRemoteCommand::test_cmd_target_remote \n[gw2] [ 35%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_set_arena 
\ntests/commands/heap.py::HeapCommandNonMain::test_cmd_heap_bins_non_main \n[gw0] [ 35%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef \ntests/commands/gef.py::GefCommand::test_cmd_gef_config \n[gw1] [ 36%] PASSED tests/api/gef_memory.py::GefMemoryApi::test_func_parse_permissions \ntests/api/gef_session.py::GefSessionApi::test_func_auxiliary_vector \n[gw2] [ 36%] PASSED tests/commands/heap.py::HeapCommandNonMain::test_cmd_heap_bins_non_main \ntests/commands/heap.py::HeapCommandNonMain::test_cmd_heap_bins_tcache \n[gw0] [ 37%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_config \ntests/commands/heap.py::HeapCommandBins::test_cmd_heap_bins_large \n[gw3] [ 38%] PASSED tests/commands/gef_remote.py::GefRemoteCommand::test_cmd_target_remote \ntests/commands/got.py::GotCommand::test_cmd_got \n[gw1] [ 38%] PASSED tests/api/gef_session.py::GefSessionApi::test_func_auxiliary_vector \ntests/api/gef_session.py::GefSessionApi::test_func_get_filepath \n[gw2] [ 39%] PASSED tests/commands/heap.py::HeapCommandNonMain::test_cmd_heap_bins_tcache \ntests/commands/heap.py::HeapCommandNonMain::test_cmd_heap_chunks \n[gw0] [ 39%] PASSED tests/commands/heap.py::HeapCommandBins::test_cmd_heap_bins_large \ntests/commands/heap.py::HeapCommandBins::test_cmd_heap_bins_small \n[gw3] [ 40%] PASSED tests/commands/got.py::GotCommand::test_cmd_got \ntests/commands/heap.py::HeapCommand::test_cmd_heap_arenas \n[gw1] [ 41%] PASSED tests/api/gef_session.py::GefSessionApi::test_func_get_filepath \ntests/commands/memory.py::MemoryCommand::test_cmd_memory_unwatch \n[gw0] [ 41%] PASSED tests/commands/heap.py::HeapCommandBins::test_cmd_heap_bins_small \ntests/commands/heap.py::HeapCommandBins::test_cmd_heap_bins_unsorted \n[gw2] [ 42%] PASSED tests/commands/heap.py::HeapCommandNonMain::test_cmd_heap_chunks \ntests/commands/heap.py::HeapCommandMultipleHeaps::test_cmd_heap_chunks_mult_heaps \n[gw3] [ 42%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_arenas \ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunk_no_arg \n[gw1] [ 43%] PASSED tests/commands/memory.py::MemoryCommand::test_cmd_memory_unwatch \ntests/commands/memory.py::MemoryCommand::test_cmd_memory_watch_basic \n[gw0] [ 44%] PASSED tests/commands/heap.py::HeapCommandBins::test_cmd_heap_bins_unsorted \ntests/commands/heap.py::HeapCommandTcache::test_cmd_heap_bins_tcache_all \n[gw2] [ 44%] PASSED tests/commands/heap.py::HeapCommandMultipleHeaps::test_cmd_heap_chunks_mult_heaps \ntests/commands/heap.py::HeapCommandClass::test_cmd_heap_chunks_summary_with_type_resolved \n[gw3] [ 45%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunk_no_arg \ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunk_with_number \n[gw1] [ 45%] PASSED tests/commands/memory.py::MemoryCommand::test_cmd_memory_watch_basic \ntests/commands/memory.py::MemoryCommand::test_cmd_memory_watch_global_variable \n[gw3] [ 46%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunk_with_number \ntests/commands/nop.py::NopCommand::test_cmd_nop_bytes_break_instruction_force \n[gw2] [ 47%] PASSED tests/commands/heap.py::HeapCommandClass::test_cmd_heap_chunks_summary_with_type_resolved \ntests/commands/heap.py::HeapCommandFastBins::test_cmd_heap_bins_fast \n[gw0] [ 47%] PASSED tests/commands/heap.py::HeapCommandTcache::test_cmd_heap_bins_tcache_all \ntests/commands/heap_analysis.py::HeapAnalysisCommand::test_cmd_heap_analysis \n[gw1] [ 48%] PASSED tests/commands/memory.py::MemoryCommand::test_cmd_memory_watch_global_variable 
\ntests/commands/name_break.py::NameBreakCommand::test_cmd_name_break \n[gw3] [ 48%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_bytes_break_instruction_force \ntests/commands/nop.py::NopCommand::test_cmd_nop_check_b_and_n_same_time \n[gw2] [ 49%] PASSED tests/commands/heap.py::HeapCommandFastBins::test_cmd_heap_bins_fast \ntests/commands/nop.py::NopCommand::test_cmd_nop_nop \n[gw1] [ 50%] PASSED tests/commands/name_break.py::NameBreakCommand::test_cmd_name_break \ntests/commands/nop.py::NopCommand::test_cmd_nop_bytes \n[gw0] [ 50%] PASSED tests/commands/heap_analysis.py::HeapAnalysisCommand::test_cmd_heap_analysis \ntests/commands/hexdump.py::HexdumpCommand::test_cmd_hexdump \n[gw3] [ 51%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_check_b_and_n_same_time \ntests/commands/nop.py::NopCommand::test_cmd_nop_force_arg_break_instruction \n[gw2] [ 51%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_nop \ntests/commands/nop.py::NopCommand::test_cmd_nop_nop_arg \n[gw1] [ 52%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_bytes \ntests/commands/nop.py::NopCommand::test_cmd_nop_bytes_arg \n[gw0] [ 52%] PASSED tests/commands/hexdump.py::HexdumpCommand::test_cmd_hexdump \ntests/commands/highlight.py::HighlightCommand::test_cmd_highlight \n[gw3] [ 53%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_force_arg_break_instruction \ntests/commands/nop.py::NopCommand::test_cmd_nop_i_arg \n[gw2] [ 54%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_nop_arg \ntests/commands/nop.py::NopCommand::test_cmd_nop_nop_arg_multibnop_breaks \n[gw1] [ 54%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_bytes_arg \ntests/commands/nop.py::NopCommand::test_cmd_nop_bytes_arg_nops_no_fit \n[gw0] [ 55%] PASSED tests/commands/highlight.py::HighlightCommand::test_cmd_highlight \ntests/commands/hijack_fd.py::HijackFdCommand::test_cmd_hijack_fd \n[gw3] [ 55%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_i_arg \ntests/commands/nop.py::NopCommand::test_cmd_nop_i_arg_reaching_unmapped_area \n[gw2] [ 56%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_nop_arg_multibnop_breaks \ntests/commands/nop.py::NopCommand::test_cmd_nop_nop_arg_multibnop_breaks_force \n[gw0] [ 57%] PASSED tests/commands/hijack_fd.py::HijackFdCommand::test_cmd_hijack_fd \ntests/commands/memory.py::MemoryCommand::test_cmd_memory_list \n[gw1] [ 57%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_bytes_arg_nops_no_fit \ntests/commands/nop.py::NopCommand::test_cmd_nop_bytes_arg_nops_no_fit_force \n[gw3] [ 58%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_i_arg_reaching_unmapped_area \ntests/commands/nop.py::NopCommand::test_cmd_nop_inactive \n[gw2] [ 58%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_nop_arg_multibnop_breaks_force \ntests/commands/nop.py::NopCommand::test_cmd_nop_nop_break_instruction \n[gw0] [ 59%] PASSED tests/commands/memory.py::MemoryCommand::test_cmd_memory_list \ntests/commands/memory.py::MemoryCommand::test_cmd_memory_reset \n[gw1] [ 60%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_bytes_arg_nops_no_fit_force \ntests/commands/nop.py::NopCommand::test_cmd_nop_bytes_break_instruction \n[gw3] [ 60%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_inactive \ntests/commands/nop.py::NopCommand::test_cmd_nop_no_arg \n[gw2] [ 61%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_nop_break_instruction \ntests/commands/nop.py::NopCommand::test_cmd_nop_nop_break_instruction_force \n[gw0] [ 61%] PASSED 
tests/commands/memory.py::MemoryCommand::test_cmd_memory_reset \ntests/commands/nop.py::NopCommandMmapKnownAddress::test_cmd_nop_invalid_end_address \n[gw1] [ 62%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_bytes_break_instruction \ntests/commands/patch.py::PatchCommand::test_cmd_patch_string \n[gw3] [ 63%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_no_arg \ntests/commands/nop.py::NopCommand::test_cmd_nop_no_arg_break_instruction \n[gw2] [ 63%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_nop_break_instruction_force \ntests/commands/nop.py::NopCommandMmapKnownAddress::test_cmd_nop_as_bytes_invalid_end_address \n[gw0] [ 64%] PASSED tests/commands/nop.py::NopCommandMmapKnownAddress::test_cmd_nop_invalid_end_address \ntests/commands/patch.py::PatchCommand::test_cmd_patch \n[gw1] [ 64%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch_string \ntests/commands/patch.py::PatchCommand::test_cmd_patch_word \n[gw3] [ 65%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_no_arg_break_instruction \ntests/commands/pcustom.py::PcustomCommand::test_cmd_pcustom \n[gw0] [ 66%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch \ntests/commands/patch.py::PatchCommand::test_cmd_patch_byte \n[gw2] [ 66%] PASSED tests/commands/nop.py::NopCommandMmapKnownAddress::test_cmd_nop_as_bytes_invalid_end_address \ntests/commands/pie.py::PieCommand::test_cmd_pie_breakpoint_run \n[gw1] [ 67%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch_word \ntests/commands/patch.py::PatchCommandBss::test_cmd_patch_qword_symbol \n[gw3] [ 67%] PASSED tests/commands/pcustom.py::PcustomCommand::test_cmd_pcustom \ntests/commands/pcustom.py::PcustomCommand::test_cmd_pcustom_show \n[gw0] [ 68%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch_byte \ntests/commands/patch.py::PatchCommand::test_cmd_patch_byte_bytearray \n[gw2] [ 69%] PASSED tests/commands/pie.py::PieCommand::test_cmd_pie_breakpoint_run \ntests/commands/print_format.py::PrintFormatCommand::test_cmd_print_format \n[gw1] [ 69%] PASSED tests/commands/patch.py::PatchCommandBss::test_cmd_patch_qword_symbol \ntests/commands/pattern.py::PatternCommand::test_cmd_pattern_create \n[gw3] [ 70%] PASSED tests/commands/pcustom.py::PcustomCommand::test_cmd_pcustom_show \ntests/commands/pie.py::PieCommand::test_cmd_pie \n[gw0] [ 70%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch_byte_bytearray \ntests/commands/patch.py::PatchCommand::test_cmd_patch_dword \n[gw2] [ 71%] PASSED tests/commands/print_format.py::PrintFormatCommand::test_cmd_print_format \ntests/commands/print_format.py::PrintFormatCommand::test_cmd_print_format_bytearray \n[gw1] [ 72%] PASSED tests/commands/pattern.py::PatternCommand::test_cmd_pattern_create \ntests/commands/pattern.py::PatternCommand::test_cmd_pattern_search \n[gw3] [ 72%] PASSED tests/commands/pie.py::PieCommand::test_cmd_pie \ntests/commands/pie.py::PieCommand::test_cmd_pie_breakpoint_check \n[gw0] [ 73%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch_dword \ntests/commands/patch.py::PatchCommand::test_cmd_patch_qword \n[gw2] [ 73%] PASSED tests/commands/print_format.py::PrintFormatCommand::test_cmd_print_format_bytearray \ntests/commands/process_search.py::ProcessSearchCommand::test_cmd_process_search1 \n[gw3] [ 74%] PASSED tests/commands/pie.py::PieCommand::test_cmd_pie_breakpoint_check \ntests/commands/pie.py::PieCommand::test_cmd_pie_breakpoint_delete \n[gw1] [ 75%] PASSED tests/commands/pattern.py::PatternCommand::test_cmd_pattern_search 
\ntests/commands/process_search.py::ProcessSearchCommand::test_cmd_process_search_smartscan \n[gw2] [ 75%] PASSED tests/commands/process_search.py::ProcessSearchCommand::test_cmd_process_search1 \ntests/commands/search_pattern.py::SearchPatternCommand::test_cmd_search_pattern \n[gw0] [ 76%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch_qword \ntests/commands/registers.py::RegistersCommand::test_cmd_registers \n[gw3] [ 76%] PASSED tests/commands/pie.py::PieCommand::test_cmd_pie_breakpoint_delete \ntests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode_get_nok \n[gw1] [ 77%] PASSED tests/commands/process_search.py::ProcessSearchCommand::test_cmd_process_search_smartscan \ntests/commands/process_search.py::ProcessSearchCommand::test_cmd_process_search_wildcart \n[gw2] [ 77%] PASSED tests/commands/search_pattern.py::SearchPatternCommand::test_cmd_search_pattern \ntests/commands/search_pattern.py::SearchPatternCommand::test_cmd_search_pattern_regex \n[gw0] [ 78%] PASSED tests/commands/registers.py::RegistersCommand::test_cmd_registers \ntests/commands/reset_cache.py::ResetCacheCommand::test_cmd_reset_cache \n[gw3] [ 79%] PASSED tests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode_get_nok \ntests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode_get_ok \n[gw1] [ 79%] PASSED tests/commands/process_search.py::ProcessSearchCommand::test_cmd_process_search_wildcart \ntests/commands/process_status.py::ProcessStatusCommand::test_cmd_process_status \n[gw2] [ 80%] PASSED tests/commands/search_pattern.py::SearchPatternCommand::test_cmd_search_pattern_regex \ntests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode \n[gw0] [ 80%] PASSED tests/commands/reset_cache.py::ResetCacheCommand::test_cmd_reset_cache \ntests/commands/scan.py::ScanCommand::test_cmd_scan \n[gw3] [ 81%] PASSED tests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode_get_ok \ntests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode_search \n[gw1] [ 82%] PASSED tests/commands/process_status.py::ProcessStatusCommand::test_cmd_process_status \ntests/commands/skipi.py::SkipiCommand::test_cmd_skipi_no_arg \n[gw2] [ 82%] PASSED tests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode \ntests/commands/skipi.py::SkipiCommand::test_cmd_skipi_two_instructions_from_location \n[gw0] [ 83%] PASSED tests/commands/scan.py::ScanCommand::test_cmd_scan \ntests/commands/stub.py::StubCommand::test_cmd_stub \n[gw3] [ 83%] PASSED tests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode_search \ntests/commands/skipi.py::SkipiCommand::test_cmd_nop_inactive \n[gw1] [ 84%] PASSED tests/commands/skipi.py::SkipiCommand::test_cmd_skipi_no_arg \ntests/commands/skipi.py::SkipiCommand::test_cmd_skipi_skip_two_instructions \n[gw2] [ 85%] PASSED tests/commands/skipi.py::SkipiCommand::test_cmd_skipi_two_instructions_from_location \ntests/commands/smart_eval.py::SmartEvalCommand::test_cmd_smart_eval \n[gw3] [ 85%] PASSED tests/commands/skipi.py::SkipiCommand::test_cmd_nop_inactive \ntests/commands/trace_run.py::TraceRunCommand::test_cmd_trace_run \n[gw0] [ 86%] PASSED tests/commands/stub.py::StubCommand::test_cmd_stub \ntests/commands/theme.py::ThemeCommand::test_cmd_theme \n[gw1] [ 86%] PASSED tests/commands/skipi.py::SkipiCommand::test_cmd_skipi_skip_two_instructions \ntests/commands/version.py::VersionCommand::test_cmd_version \n[gw2] [ 87%] PASSED tests/commands/smart_eval.py::SmartEvalCommand::test_cmd_smart_eval \ntests/commands/vmmap.py::VmmapCommand::test_cmd_vmmap 
\n[gw0] [ 88%] PASSED tests/commands/theme.py::ThemeCommand::test_cmd_theme \ntests/commands/xinfo.py::XinfoCommand::test_cmd_xinfo \n[gw3] [ 88%] PASSED tests/commands/trace_run.py::TraceRunCommand::test_cmd_trace_run \ntests/commands/xfiles.py::XfilesCommand::test_cmd_xfiles \n[gw1] [ 89%] PASSED tests/commands/version.py::VersionCommand::test_cmd_version \ntests/commands/xinfo.py::XinfoCommandClass::test_cmd_xinfo_on_class \n[gw2] [ 89%] PASSED tests/commands/vmmap.py::VmmapCommand::test_cmd_vmmap \ntests/commands/xor_memory.py::XorMemoryCommand::test_cmd_xor_memory_display \n[gw0] [ 90%] PASSED tests/commands/xinfo.py::XinfoCommand::test_cmd_xinfo \ntests/commands/xor_memory.py::XorMemoryCommand::test_cmd_xor_memory_patch \n[gw3] [ 91%] PASSED tests/commands/xfiles.py::XfilesCommand::test_cmd_xfiles \ntests/config/__init__.py::TestGefConfigUnit::test_config_hook_validator \n[gw1] [ 91%] PASSED tests/commands/xinfo.py::XinfoCommandClass::test_cmd_xinfo_on_class \ntests/config/__init__.py::TestGefConfigUnit::test_config_libc_version \n[gw2] [ 92%] PASSED tests/commands/xor_memory.py::XorMemoryCommand::test_cmd_xor_memory_display \ntests/config/__init__.py::TestGefConfigUnit::test_config_show_opcodes_size \n[gw0] [ 92%] PASSED tests/commands/xor_memory.py::XorMemoryCommand::test_cmd_xor_memory_patch \ntests/config/__init__.py::TestGefConfigUnit::test_config_type_validator \n[gw3] [ 93%] PASSED tests/config/__init__.py::TestGefConfigUnit::test_config_hook_validator \ntests/functions/elf_sections.py::ElfSectionGdbFunction::test_func_base \n[gw1] [ 94%] PASSED tests/config/__init__.py::TestGefConfigUnit::test_config_libc_version \ntests/functions/elf_sections.py::ElfSectionGdbFunction::test_func_stack \n[gw2] [ 94%] PASSED tests/config/__init__.py::TestGefConfigUnit::test_config_show_opcodes_size \ntests/functions/elf_sections.py::ElfSectionGdbFunctionBss::test_func_bss \n[gw0] [ 95%] PASSED tests/config/__init__.py::TestGefConfigUnit::test_config_type_validator \ntests/functions/elf_sections.py::ElfSectionGdbFunctionHeap::test_func_got \n[gw3] [ 95%] PASSED tests/functions/elf_sections.py::ElfSectionGdbFunction::test_func_base \ntests/functions/elf_sections.py::ElfSectionGdbFunctionHeap::test_func_heap \n[gw1] [ 96%] PASSED tests/functions/elf_sections.py::ElfSectionGdbFunction::test_func_stack \ntests/regressions/gdbserver_connection.py::RegressionGdbserverConnection::test_can_establish_connection_to_gdbserver_again_after_disconnect \n[gw2] [ 97%] PASSED tests/functions/elf_sections.py::ElfSectionGdbFunctionBss::test_func_bss \ntests/regressions/registers_register_order.py::RegressionRegisterOrder::test_registers_show_registers_in_correct_order \n[gw0] [ 97%] PASSED tests/functions/elf_sections.py::ElfSectionGdbFunctionHeap::test_func_got \ntests/regressions/registers_register_order.py::RegressionRegisterOrderNested::test_context_correct_registers_refresh_with_frames \n[gw3] [ 98%] PASSED tests/functions/elf_sections.py::ElfSectionGdbFunctionHeap::test_func_heap \n[gw2] [ 98%] PASSED tests/regressions/registers_register_order.py::RegressionRegisterOrder::test_registers_show_registers_in_correct_order \n[gw0] [ 99%] PASSED tests/regressions/registers_register_order.py::RegressionRegisterOrderNested::test_context_correct_registers_refresh_with_frames \n[gw1] [100%] PASSED tests/regressions/gdbserver_connection.py::RegressionGdbserverConnection::test_can_establish_connection_to_gdbserver_again_after_disconnect \n\n=================================== FAILURES 
===================================\n____________ GefMemoryApi.test_api_gef_memory_parse_info_proc_maps _____________\n[gw1] linux -- Python 3.10.12 /usr/bin/python3.10\nself = <tests.api.gef_memory.GefMemoryApi testMethod=test_api_gef_memory_parse_info_proc_maps>\n\n def test_api_gef_memory_parse_info_proc_maps(self):\n gdb, gef, root = self._gdb, self._gef, self._conn.root\n gdb.execute(\"start\")\n \n Section = root.eval(\"Section\")\n \n> if self.gdb_version < (11, 0):\nE TypeError: '<' not supported between instances of 'list' and 'tuple'\n\ntests/api/gef_memory.py:71: TypeError\n----------------------------- Captured stdout call -----------------------------\nReading symbols from /tmp/default.out...\n\u001b[32mGEF\u001b[0m for linux ready, type `\u001b[4m\u001b[33mgef\u001b[24m\u001b[0m' to start, `\u001b[4m\u001b[35mgef config\u001b[24m\u001b[0m' to configure\n\u001b[1m\u001b[32m88\u001b[0m commands loaded and \u001b[1m\u001b[34m5\u001b[0m functions added for GDB \u001b[1m\u001b[33m12.1\u001b[0m in 0.01ms using Python engine \u001b[1m\u001b[31m3.10\u001b[0m\n[+] Breaking at '0x1169'\n[Thread debugging using libthread_db enabled]\nUsing host libthread_db library \"/lib/x86_64-linux-gnu/libthread_db.so.1\".\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 registers \u2500\u2500\u2500\u2500\n$rax : 0x0000555555555169 \u2192 <main+0> endbr64 \n$rbx : 0x0 \n$rcx : 0x0000555555557db8 \u2192 0x0000555555555120 \u2192 <__do_global_dtors_aux+0> endbr64 \n$rdx : 0x00007fffffffd758 \u2192 0x00007fffffffdcc8 \u2192 \"GITHUB_STATE=/home/runner/work/_temp/_runner_file_[...]\"\n$rsp : 0x00007fffffffd600 \u2192 0x0000000000000002\n$rbp : 0x00007fffffffd630 \u2192 0x0000000000000001\n$rsi : 0x00007fffffffd748 \u2192 0x00007fffffffdcb7 \u2192 \"/tmp/default.out\"\n$rdi : 0x1 \n$rip : 0x0000555555555180 \u2192 <main+23> mov rax, QWORD PTR fs:0x28\n$r8 : 0x00007ffff7e1bf10 \u2192 0x0000000000000004\n$r9 : 0x00007ffff7fc9040 \u2192 <_dl_fini+0> endbr64 \n$r10 : 0x00007ffff7fc3908 \u2192 0x000d00120000000e\n$r11 : 0x00007ffff7fde660 \u2192 <_dl_audit_preinit+0> endbr64 \n$r12 : 0x00007fffffffd748 \u2192 0x00007fffffffdcb7 \u2192 \"/tmp/default.out\"\n$r13 : 0x0000555555555169 \u2192 <main+0> endbr64 \n$r14 : 0x0000555555557db8 \u2192 0x0000555555555120 \u2192 <__do_global_dtors_aux+0> endbr64 \n$r15 : 0x00007ffff7ffd040 \u2192 0x00007ffff7ffe2e0 \u2192 0x0000555555554000 \u2192 jg 0x555555554047\n$eflags: [zero carry PARITY adjust sign trap INTERRUPT direction overflow resume virtualx86 identification]\n$cs: 0x33 $ss: 0x2b $ds: 0x00 $es: 0x00 $fs: 0x00 $gs: 0x00 
\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 stack \u2500\u2500\u2500\u2500\n0x00007fffffffd600\u2502+0x0000: 0x0000000000000002\t \u2190 $rsp\n0x00007fffffffd608\u2502+0x0008: 0x00007fffffffd758 \u2192 0x00007fffffffdcc8 \u2192 \"GITHUB_STATE=/home/runner/work/_temp/_runner_file_[...]\"\n0x00007fffffffd610\u2502+0x0010: 0x00007fffffffd748 \u2192 0x00007fffffffdcb7 \u2192 \"/tmp/default.out\"\n0x00007fffffffd618\u2502+0x0018: 0x0000000100000064 (\"d\"?)\n0x00007fffffffd620\u2502+0x0020: 0x0000000000001000\n0x00007fffffffd628\u2502+0x0028: 0x0000555555555080 \u2192 <_start+0> endbr64 \n0x00007fffffffd630\u2502+0x0030: 0x0000000000000001\t \u2190 $rbp\n0x00007fffffffd638\u2502+0x0038: 0x00007ffff7c29d90 \u2192 <__libc_start_call_main+128> mov edi, eax\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 code:x86:64 \u2500\u2500\u2500\u2500\n 0x555555555175 <main+12> mov DWORD PTR [rbp-0x14], edi\n 0x555555555178 <main+15> mov QWORD PTR [rbp-0x20], rsi\n 0x55555555517c <main+19> mov QWORD PTR [rbp-0x28], rdx\n \u2192 0x555555555180 <main+23> mov rax, QWORD PTR fs:0x28\n 0x555555555189 <main+32> mov QWORD PTR [rbp-0x8], rax\n 0x55555555518d <main+36> xor eax, eax\n 0x55555555518f <main+38> lea rax, [rip+0xe6e] # 0x555555556004\n 0x555555555196 <main+45> mov rdi, rax\n 0x555555555199 <main+48> call 0x555555555060 <puts@plt>\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 source:default.c+13 \u2500\u2500\u2500\u2500\n 8\t #include <stdlib.h>\n 9\t #include <unistd.h>\n 10\t \n 11\t \n 12\t int main(int argc, char** argv, char** envp)\n \u2192 13\t {\n 14\t printf(\"Hello World!\\n\");\n 15\t return EXIT_SUCCESS;\n 16\t }\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 threads \u2500\u2500\u2500\u2500\n[#0] Id 1, 
Name: \"default.out\", stopped 0x555555555180 in main (), reason: BREAKPOINT\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 trace \u2500\u2500\u2500\u2500\n[#0] 0x555555555180 \u2192 main(argc=0x1, argv=0x7fffffffd748, envp=0x7fffffffd758)\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n----------------------------- Captured stderr call -----------------------------\nError while writing index for `/tmp/default.out': mkstemp: No such file or directory.\nTERM environment variable not set.\nWARNING:REMOTEDEBUG/25467:keyboard interrupt!\n____ GefMemoryApi.test_api_gef_memory_parse_info_proc_maps_expected_format _____\n[gw1] linux -- Python 3.10.12 /usr/bin/python3.10\nself = <tests.api.gef_memory.GefMemoryApi testMethod=test_api_gef_memory_parse_info_proc_maps_expected_format>\n\n def test_api_gef_memory_parse_info_proc_maps_expected_format(self):\n> if self.gdb_version < (11, 0):\nE TypeError: '<' not supported between instances of 'list' and 'tuple'\n\ntests/api/gef_memory.py:36: TypeError\n----------------------------- Captured stdout call -----------------------------\nReading symbols from /tmp/default.out...\n\u001b[32mGEF\u001b[0m for linux ready, type `\u001b[4m\u001b[33mgef\u001b[24m\u001b[0m' to start, `\u001b[4m\u001b[35mgef config\u001b[24m\u001b[0m' to configure\n\u001b[1m\u001b[32m88\u001b[0m commands loaded and \u001b[1m\u001b[34m5\u001b[0m functions added for GDB \u001b[1m\u001b[33m12.1\u001b[0m in 0.00ms using Python engine \u001b[1m\u001b[31m3.10\u001b[0m\n\n----------------------------- Captured stderr call -----------------------------\nError while writing index for `/tmp/default.out': mkstemp: No such file or directory.\nWARNING:REMOTEDEBUG/8616:keyboard interrupt!\n=========================== short test summary info ============================\nFAILED tests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_parse_info_proc_maps\nFAILED tests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_parse_info_proc_maps_expected_format\n============= 2 failed, 165 passed, 1 skipped in 61.42s (0:01:01) ==============\n##[error]Process completed with exit code 1.\n"}, {"step_name": "Run Unit tests on ubuntu-20.04/8_Run Tests.txt", "log": "##[group]Run make -C tests/binaries -j 4\n\u001b[36;1mmake -C tests/binaries -j 4\u001b[0m\n\u001b[36;1mpython3.8 -m pytest --forked -n 4 -v -k \"not benchmark\" tests/\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n PY_VER: 3.8\n GEF_CI_NB_CPU: 4\n GEF_CI_ARCH: x86_64\n GEF_CI_CACHE_DIR: 
/home/runner/.cache/pip\n##[endgroup]\nmake: Entering directory '/home/runner/work/gef/gef/tests/binaries'\n[+] Building '/tmp/heap.out'\n[+] Building '/tmp/bss.out'\n[+] Building '/tmp/canary.out'\n[+] Building '/tmp/heap-multiple-heaps.out'\n[+] Building '/tmp/checksec-no-canary.out'\n[+] Building '/tmp/format-string-helper.out'\n[+] Building '/tmp/checksec-no-nx.out'\n[+] Building '/tmp/heap-non-main.out'\n[+] Building '/tmp/nested2.out'\n[+] Building '/tmp/mmap-known-address.out'\n[+] Building '/tmp/heap-analysis.out'\n[+] Building '/tmp/default.out'\n[+] Building '/tmp/heap-tcache.out'\n[+] Building '/tmp/nested.out'\n[+] Building '/tmp/checksec-no-pie.out'\n[+] Building '/tmp/heap-bins.out'\n[+] Building '/tmp/pattern.out'\n[+] Building '/tmp/memwatch.out'\n[+] Building '/tmp/pcustom.out'\n[+] Building '/tmp/heap-fastbins.out'\n[+] Building '/tmp/class.out'\nmake: Leaving directory '/home/runner/work/gef/gef/tests/binaries'\n============================= test session starts ==============================\nplatform linux -- Python 3.8.10, pytest-7.4.4, pluggy-1.3.0 -- /usr/bin/python3.8\ncachedir: .pytest_cache\nbenchmark: 4.0.0 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)\nrootdir: /home/runner/work/gef/gef/tests\nconfigfile: pytest.ini\nplugins: forked-1.6.0, xdist-3.5.0, benchmark-4.0.0, cov-4.1.0\ncreated: 4/4 workers\n4 workers [168 items]\n\nscheduling tests via LoadScheduling\n\ntests/api/deprecated.py::GefFuncDeprecatedApi::test_deprecated_elf_values \ntests/api/gef_session.py::GefSessionApi::test_func_get_pid \ntests/api/gef_heap.py::GefHeapApi::test_func_gef_heap_tidx2size \ntests/commands/aliases.py::AliasesCommand::test_cmd_aliases_rm \n[gw0] [ 0%] PASSED tests/api/deprecated.py::GefFuncDeprecatedApi::test_deprecated_elf_values \ntests/api/deprecated.py::GefFuncDeprecatedApi::test_deprecated_gef_attributes \n[gw3] [ 1%] PASSED tests/commands/aliases.py::AliasesCommand::test_cmd_aliases_rm \ntests/commands/aslr.py::AslrCommand::test_cmd_aslr_show \n[gw2] [ 1%] PASSED tests/api/gef_heap.py::GefHeapApi::test_func_gef_heap_tidx2size \ntests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_only_running \n[gw1] [ 2%] PASSED tests/api/gef_session.py::GefSessionApi::test_func_get_pid \ntests/api/gef_session.py::GefSessionApi::test_root_dir_local \n[gw3] [ 2%] PASSED tests/commands/aslr.py::AslrCommand::test_cmd_aslr_show \ntests/commands/aslr.py::AslrCommand::test_cmd_aslr_toggle \n[gw0] [ 3%] PASSED tests/api/deprecated.py::GefFuncDeprecatedApi::test_deprecated_gef_attributes \ntests/api/gef_arch.py::GefArchApi::test_api_gef_arch_ptrsize \n[gw2] [ 4%] PASSED tests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_only_running \ntests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_parse_info_proc_maps \n[gw1] [ 4%] PASSED tests/api/gef_session.py::GefSessionApi::test_root_dir_local \ntests/api/gef_session.py::GefSessionApi::test_root_dir_qemu \n[gw3] [ 5%] PASSED tests/commands/aslr.py::AslrCommand::test_cmd_aslr_toggle \ntests/commands/canary.py::CanaryCommand::test_cmd_canary \n[gw0] [ 5%] PASSED tests/api/gef_arch.py::GefArchApi::test_api_gef_arch_ptrsize \ntests/api/gef_arch.py::GefArchApi::test_api_gef_arch_x86 \n[gw0] [ 6%] SKIPPED tests/api/gef_arch.py::GefArchApi::test_api_gef_arch_x86 \ntests/api/gef_arch.py::GefArchApi::test_api_gef_arch_x86_64 \n[gw1] [ 7%] PASSED tests/api/gef_session.py::GefSessionApi::test_root_dir_qemu 
\ntests/api/gef_session.py::GefSessionApi::test_root_dir_remote \n[gw2] [ 7%] FAILED tests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_parse_info_proc_maps \ntests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_parse_info_proc_maps_expected_format \n[gw3] [ 8%] PASSED tests/commands/canary.py::CanaryCommand::test_cmd_canary \ntests/commands/canary.py::CanaryCommand::test_overwrite_canary \n[gw0] [ 8%] PASSED tests/api/gef_arch.py::GefArchApi::test_api_gef_arch_x86_64 \ntests/api/gef_disassemble.py::GefDisassembleApiFunction::test_func_gef_disassemble \n[gw2] [ 9%] FAILED tests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_parse_info_proc_maps_expected_format \ntests/api/gef_memory.py::GefMemoryApi::test_func_parse_maps_local_procfs \n[gw3] [ 10%] PASSED tests/commands/canary.py::CanaryCommand::test_overwrite_canary \ntests/commands/checksec.py::ChecksecCommandNoCanary::test_cmd_checksec \n[gw0] [ 10%] PASSED tests/api/gef_disassemble.py::GefDisassembleApiFunction::test_func_gef_disassemble \ntests/api/gef_disassemble.py::GefDisassembleApiFunction::test_func_gef_disassemble_page_border \n[gw1] [ 11%] PASSED tests/api/gef_session.py::GefSessionApi::test_root_dir_remote \ntests/api/misc.py::MiscFunctionTest::test_func_gef_convenience \n[gw2] [ 11%] PASSED tests/api/gef_memory.py::GefMemoryApi::test_func_parse_maps_local_procfs \ntests/api/gef_memory.py::GefMemoryApi::test_func_parse_maps_remote_gdbserver \n[gw3] [ 12%] PASSED tests/commands/checksec.py::ChecksecCommandNoCanary::test_cmd_checksec \ntests/commands/checksec.py::ChecksecCommandNoNx::test_cmd_checksec \n[gw0] [ 13%] PASSED tests/api/gef_disassemble.py::GefDisassembleApiFunction::test_func_gef_disassemble_page_border \ntests/api/gef_heap.py::GefHeapApi::test_class_glibcarena_main_arena \n[gw1] [ 13%] PASSED tests/api/misc.py::MiscFunctionTest::test_func_gef_convenience \ntests/api/misc.py::MiscFunctionTest::test_func_parse_address \n[gw3] [ 14%] PASSED tests/commands/checksec.py::ChecksecCommandNoNx::test_cmd_checksec \ntests/commands/checksec.py::ChecksecCommandNoPie::test_cmd_checksec \n[gw1] [ 14%] PASSED tests/api/misc.py::MiscFunctionTest::test_func_parse_address \ntests/api/misc.py::MiscFunctionTest::test_func_show_last_exception \n[gw0] [ 15%] PASSED tests/api/gef_heap.py::GefHeapApi::test_class_glibcarena_main_arena \ntests/api/gef_heap.py::GefHeapApi::test_func_gef_heap_csize2tidx \n[gw3] [ 16%] PASSED tests/commands/checksec.py::ChecksecCommandNoPie::test_cmd_checksec \ntests/commands/dereference.py::DereferenceCommand::test_cmd_dereference \n[gw1] [ 16%] PASSED tests/api/misc.py::MiscFunctionTest::test_func_show_last_exception \ntests/api/misc.py::MiscFunctionTest::test_func_which \n[gw0] [ 17%] PASSED tests/api/gef_heap.py::GefHeapApi::test_func_gef_heap_csize2tidx \ntests/api/gef_heap.py::GefHeapApi::test_func_gef_heap_malloc_align_address \n[gw3] [ 17%] PASSED tests/commands/dereference.py::DereferenceCommand::test_cmd_dereference \ntests/commands/dereference.py::DereferenceCommand::test_cmd_dereference_backwards \n[gw1] [ 18%] PASSED tests/api/misc.py::MiscFunctionTest::test_func_which \ntests/commands/aliases.py::AliasesCommand::test_cmd_aliases_add \n[gw0] [ 19%] PASSED tests/api/gef_heap.py::GefHeapApi::test_func_gef_heap_malloc_align_address \ntests/commands/dereference.py::DereferenceCommand::test_cmd_dereference_forwards \n[gw3] [ 19%] PASSED tests/commands/dereference.py::DereferenceCommand::test_cmd_dereference_backwards \ntests/commands/gef.py::GefCommand::test_cmd_gef_config_get 
\n[gw1] [ 20%] PASSED tests/commands/aliases.py::AliasesCommand::test_cmd_aliases_add \ntests/commands/aliases.py::AliasesCommand::test_cmd_aliases_list \n[gw0] [ 20%] PASSED tests/commands/dereference.py::DereferenceCommand::test_cmd_dereference_forwards \ntests/commands/edit_flags.py::EditFlagsCommand::test_cmd_edit_flags_disable \n[gw3] [ 21%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_config_get \ntests/commands/gef.py::GefCommand::test_cmd_gef_config_set \n[gw1] [ 22%] PASSED tests/commands/aliases.py::AliasesCommand::test_cmd_aliases_list \ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunks \n[gw3] [ 22%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_config_set \ntests/commands/gef.py::GefCommand::test_cmd_gef_help \n[gw0] [ 23%] PASSED tests/commands/edit_flags.py::EditFlagsCommand::test_cmd_edit_flags_disable \ntests/commands/edit_flags.py::EditFlagsCommand::test_cmd_edit_flags_enable \n[gw1] [ 23%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunks \ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_max_size_filter \n[gw3] [ 24%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_help \ntests/commands/gef.py::GefCommand::test_cmd_gef_install \n[gw0] [ 25%] PASSED tests/commands/edit_flags.py::EditFlagsCommand::test_cmd_edit_flags_enable \ntests/commands/edit_flags.py::EditFlagsCommand::test_cmd_edit_flags_toggle \n[gw1] [ 25%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_max_size_filter \ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_min_size_filter \n[gw3] [ 26%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_install \ntests/commands/gef.py::GefCommand::test_cmd_gef_run_and_run \n[gw0] [ 26%] PASSED tests/commands/edit_flags.py::EditFlagsCommand::test_cmd_edit_flags_toggle \ntests/commands/elf_info.py::ElfInfoCommand::test_cmd_elf_info \n[gw3] [ 27%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_run_and_run \ntests/commands/gef.py::GefCommand::test_cmd_gef_save \n[gw1] [ 27%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_min_size_filter \ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_summary \n[gw0] [ 28%] PASSED tests/commands/elf_info.py::ElfInfoCommand::test_cmd_elf_info \ntests/commands/entry_break.py::EntryBreakCommand::test_cmd_entry_break \n[gw3] [ 29%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_save \ntests/commands/gef_remote.py::GefRemoteCommand::test_cmd_gef_remote \n[gw1] [ 29%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_summary \ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_with_count \n[gw0] [ 30%] PASSED tests/commands/entry_break.py::EntryBreakCommand::test_cmd_entry_break \ntests/commands/format_string_helper.py::FormatStringHelperCommand::test_cmd_format_string_helper \n[gw1] [ 30%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunks_with_count \ntests/commands/heap.py::HeapCommand::test_cmd_heap_set_arena \n[gw0] [ 31%] PASSED tests/commands/format_string_helper.py::FormatStringHelperCommand::test_cmd_format_string_helper \ntests/commands/functions.py::FunctionsCommand::test_cmd_functions \n[gw3] [ 32%] PASSED tests/commands/gef_remote.py::GefRemoteCommand::test_cmd_gef_remote \ntests/commands/gef_remote.py::GefRemoteCommand::test_cmd_gef_remote_qemu_user \n[gw2] [ 32%] PASSED tests/api/gef_memory.py::GefMemoryApi::test_func_parse_maps_remote_gdbserver \ntests/api/gef_memory.py::GefMemoryApi::test_func_parse_maps_remote_qemu \n[gw0] [ 33%] PASSED 
tests/commands/functions.py::FunctionsCommand::test_cmd_functions \ntests/commands/gef.py::GefCommand::test_cmd_gef \n[gw3] [ 33%] PASSED tests/commands/gef_remote.py::GefRemoteCommand::test_cmd_gef_remote_qemu_user \ntests/commands/gef_remote.py::GefRemoteCommand::test_cmd_target_remote \n[gw1] [ 34%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_set_arena \ntests/commands/heap.py::HeapCommandNonMain::test_cmd_heap_bins_non_main \n[gw2] [ 35%] PASSED tests/api/gef_memory.py::GefMemoryApi::test_func_parse_maps_remote_qemu \ntests/api/gef_memory.py::GefMemoryApi::test_func_parse_permissions \n[gw0] [ 35%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef \ntests/commands/gef.py::GefCommand::test_cmd_gef_config \n[gw1] [ 36%] PASSED tests/commands/heap.py::HeapCommandNonMain::test_cmd_heap_bins_non_main \ntests/commands/heap.py::HeapCommandNonMain::test_cmd_heap_bins_tcache \n[gw2] [ 36%] PASSED tests/api/gef_memory.py::GefMemoryApi::test_func_parse_permissions \ntests/api/gef_session.py::GefSessionApi::test_func_auxiliary_vector \n[gw0] [ 37%] PASSED tests/commands/gef.py::GefCommand::test_cmd_gef_config \ntests/commands/heap.py::HeapCommandBins::test_cmd_heap_bins_large \n[gw3] [ 38%] PASSED tests/commands/gef_remote.py::GefRemoteCommand::test_cmd_target_remote \ntests/commands/got.py::GotCommand::test_cmd_got \n[gw1] [ 38%] PASSED tests/commands/heap.py::HeapCommandNonMain::test_cmd_heap_bins_tcache \ntests/commands/heap.py::HeapCommandNonMain::test_cmd_heap_chunks \n[gw2] [ 39%] PASSED tests/api/gef_session.py::GefSessionApi::test_func_auxiliary_vector \ntests/api/gef_session.py::GefSessionApi::test_func_get_filepath \n[gw0] [ 39%] PASSED tests/commands/heap.py::HeapCommandBins::test_cmd_heap_bins_large \ntests/commands/heap.py::HeapCommandBins::test_cmd_heap_bins_small \n[gw3] [ 40%] PASSED tests/commands/got.py::GotCommand::test_cmd_got \ntests/commands/heap.py::HeapCommand::test_cmd_heap_arenas \n[gw2] [ 41%] PASSED tests/api/gef_session.py::GefSessionApi::test_func_get_filepath \ntests/commands/memory.py::MemoryCommand::test_cmd_memory_unwatch \n[gw1] [ 41%] PASSED tests/commands/heap.py::HeapCommandNonMain::test_cmd_heap_chunks \ntests/commands/heap.py::HeapCommandMultipleHeaps::test_cmd_heap_chunks_mult_heaps \n[gw0] [ 42%] PASSED tests/commands/heap.py::HeapCommandBins::test_cmd_heap_bins_small \ntests/commands/heap.py::HeapCommandBins::test_cmd_heap_bins_unsorted \n[gw3] [ 42%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_arenas \ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunk_no_arg \n[gw2] [ 43%] PASSED tests/commands/memory.py::MemoryCommand::test_cmd_memory_unwatch \ntests/commands/memory.py::MemoryCommand::test_cmd_memory_watch_basic \n[gw1] [ 44%] PASSED tests/commands/heap.py::HeapCommandMultipleHeaps::test_cmd_heap_chunks_mult_heaps \ntests/commands/heap.py::HeapCommandClass::test_cmd_heap_chunks_summary_with_type_resolved \n[gw0] [ 44%] PASSED tests/commands/heap.py::HeapCommandBins::test_cmd_heap_bins_unsorted \ntests/commands/heap.py::HeapCommandTcache::test_cmd_heap_bins_tcache_all \n[gw3] [ 45%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunk_no_arg \ntests/commands/heap.py::HeapCommand::test_cmd_heap_chunk_with_number \n[gw2] [ 45%] PASSED tests/commands/memory.py::MemoryCommand::test_cmd_memory_watch_basic \ntests/commands/memory.py::MemoryCommand::test_cmd_memory_watch_global_variable \n[gw1] [ 46%] PASSED tests/commands/heap.py::HeapCommandClass::test_cmd_heap_chunks_summary_with_type_resolved 
\ntests/commands/heap.py::HeapCommandFastBins::test_cmd_heap_bins_fast \n[gw3] [ 47%] PASSED tests/commands/heap.py::HeapCommand::test_cmd_heap_chunk_with_number \ntests/commands/nop.py::NopCommand::test_cmd_nop_bytes_break_instruction_force \n[gw0] [ 47%] PASSED tests/commands/heap.py::HeapCommandTcache::test_cmd_heap_bins_tcache_all \ntests/commands/heap_analysis.py::HeapAnalysisCommand::test_cmd_heap_analysis \n[gw1] [ 48%] PASSED tests/commands/heap.py::HeapCommandFastBins::test_cmd_heap_bins_fast \ntests/commands/nop.py::NopCommand::test_cmd_nop_nop \n[gw2] [ 48%] PASSED tests/commands/memory.py::MemoryCommand::test_cmd_memory_watch_global_variable \ntests/commands/name_break.py::NameBreakCommand::test_cmd_name_break \n[gw3] [ 49%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_bytes_break_instruction_force \ntests/commands/nop.py::NopCommand::test_cmd_nop_check_b_and_n_same_time \n[gw0] [ 50%] PASSED tests/commands/heap_analysis.py::HeapAnalysisCommand::test_cmd_heap_analysis \ntests/commands/hexdump.py::HexdumpCommand::test_cmd_hexdump \n[gw1] [ 50%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_nop \ntests/commands/nop.py::NopCommand::test_cmd_nop_nop_arg \n[gw2] [ 51%] PASSED tests/commands/name_break.py::NameBreakCommand::test_cmd_name_break \ntests/commands/nop.py::NopCommand::test_cmd_nop_bytes \n[gw3] [ 51%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_check_b_and_n_same_time \ntests/commands/nop.py::NopCommand::test_cmd_nop_force_arg_break_instruction \n[gw0] [ 52%] PASSED tests/commands/hexdump.py::HexdumpCommand::test_cmd_hexdump \ntests/commands/highlight.py::HighlightCommand::test_cmd_highlight \n[gw1] [ 52%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_nop_arg \ntests/commands/nop.py::NopCommand::test_cmd_nop_nop_arg_multibnop_breaks \n[gw2] [ 53%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_bytes \ntests/commands/nop.py::NopCommand::test_cmd_nop_bytes_arg \n[gw3] [ 54%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_force_arg_break_instruction \ntests/commands/nop.py::NopCommand::test_cmd_nop_i_arg \n[gw0] [ 54%] PASSED tests/commands/highlight.py::HighlightCommand::test_cmd_highlight \ntests/commands/hijack_fd.py::HijackFdCommand::test_cmd_hijack_fd \n[gw1] [ 55%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_nop_arg_multibnop_breaks \ntests/commands/nop.py::NopCommand::test_cmd_nop_nop_arg_multibnop_breaks_force \n[gw3] [ 55%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_i_arg \ntests/commands/nop.py::NopCommand::test_cmd_nop_i_arg_reaching_unmapped_area \n[gw2] [ 56%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_bytes_arg \ntests/commands/nop.py::NopCommand::test_cmd_nop_bytes_arg_nops_no_fit \n[gw0] [ 57%] PASSED tests/commands/hijack_fd.py::HijackFdCommand::test_cmd_hijack_fd \ntests/commands/memory.py::MemoryCommand::test_cmd_memory_list \n[gw1] [ 57%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_nop_arg_multibnop_breaks_force \ntests/commands/nop.py::NopCommand::test_cmd_nop_nop_break_instruction \n[gw3] [ 58%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_i_arg_reaching_unmapped_area \ntests/commands/nop.py::NopCommand::test_cmd_nop_inactive \n[gw2] [ 58%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_bytes_arg_nops_no_fit \ntests/commands/nop.py::NopCommand::test_cmd_nop_bytes_arg_nops_no_fit_force \n[gw0] [ 59%] PASSED tests/commands/memory.py::MemoryCommand::test_cmd_memory_list \ntests/commands/memory.py::MemoryCommand::test_cmd_memory_reset 
\n[gw1] [ 60%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_nop_break_instruction \ntests/commands/nop.py::NopCommand::test_cmd_nop_nop_break_instruction_force \n[gw3] [ 60%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_inactive \ntests/commands/nop.py::NopCommand::test_cmd_nop_no_arg \n[gw2] [ 61%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_bytes_arg_nops_no_fit_force \ntests/commands/nop.py::NopCommand::test_cmd_nop_bytes_break_instruction \n[gw0] [ 61%] PASSED tests/commands/memory.py::MemoryCommand::test_cmd_memory_reset \ntests/commands/nop.py::NopCommandMmapKnownAddress::test_cmd_nop_invalid_end_address \n[gw1] [ 62%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_nop_break_instruction_force \ntests/commands/nop.py::NopCommandMmapKnownAddress::test_cmd_nop_as_bytes_invalid_end_address \n[gw3] [ 63%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_no_arg \ntests/commands/nop.py::NopCommand::test_cmd_nop_no_arg_break_instruction \n[gw2] [ 63%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_bytes_break_instruction \ntests/commands/patch.py::PatchCommand::test_cmd_patch_string \n[gw0] [ 64%] PASSED tests/commands/nop.py::NopCommandMmapKnownAddress::test_cmd_nop_invalid_end_address \ntests/commands/patch.py::PatchCommand::test_cmd_patch \n[gw1] [ 64%] PASSED tests/commands/nop.py::NopCommandMmapKnownAddress::test_cmd_nop_as_bytes_invalid_end_address \ntests/commands/pcustom.py::PcustomCommand::test_cmd_pcustom \n[gw3] [ 65%] PASSED tests/commands/nop.py::NopCommand::test_cmd_nop_no_arg_break_instruction \ntests/commands/pie.py::PieCommand::test_cmd_pie_breakpoint_run \n[gw2] [ 66%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch_string \ntests/commands/patch.py::PatchCommand::test_cmd_patch_word \n[gw0] [ 66%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch \ntests/commands/patch.py::PatchCommand::test_cmd_patch_byte \n[gw1] [ 67%] PASSED tests/commands/pcustom.py::PcustomCommand::test_cmd_pcustom \ntests/commands/pcustom.py::PcustomCommand::test_cmd_pcustom_show \n[gw3] [ 67%] PASSED tests/commands/pie.py::PieCommand::test_cmd_pie_breakpoint_run \ntests/commands/print_format.py::PrintFormatCommand::test_cmd_print_format \n[gw2] [ 68%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch_word \ntests/commands/patch.py::PatchCommandBss::test_cmd_patch_qword_symbol \n[gw0] [ 69%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch_byte \ntests/commands/patch.py::PatchCommand::test_cmd_patch_byte_bytearray \n[gw3] [ 69%] PASSED tests/commands/print_format.py::PrintFormatCommand::test_cmd_print_format \ntests/commands/print_format.py::PrintFormatCommand::test_cmd_print_format_bytearray \n[gw1] [ 70%] PASSED tests/commands/pcustom.py::PcustomCommand::test_cmd_pcustom_show \ntests/commands/pie.py::PieCommand::test_cmd_pie \n[gw2] [ 70%] PASSED tests/commands/patch.py::PatchCommandBss::test_cmd_patch_qword_symbol \ntests/commands/pattern.py::PatternCommand::test_cmd_pattern_create \n[gw0] [ 71%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch_byte_bytearray \ntests/commands/patch.py::PatchCommand::test_cmd_patch_dword \n[gw1] [ 72%] PASSED tests/commands/pie.py::PieCommand::test_cmd_pie \ntests/commands/pie.py::PieCommand::test_cmd_pie_breakpoint_check \n[gw3] [ 72%] PASSED tests/commands/print_format.py::PrintFormatCommand::test_cmd_print_format_bytearray \ntests/commands/process_search.py::ProcessSearchCommand::test_cmd_process_search1 \n[gw2] [ 73%] PASSED 
tests/commands/pattern.py::PatternCommand::test_cmd_pattern_create \ntests/commands/pattern.py::PatternCommand::test_cmd_pattern_search \n[gw0] [ 73%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch_dword \ntests/commands/patch.py::PatchCommand::test_cmd_patch_qword \n[gw1] [ 74%] PASSED tests/commands/pie.py::PieCommand::test_cmd_pie_breakpoint_check \ntests/commands/pie.py::PieCommand::test_cmd_pie_breakpoint_delete \n[gw3] [ 75%] PASSED tests/commands/process_search.py::ProcessSearchCommand::test_cmd_process_search1 \ntests/commands/process_search.py::ProcessSearchCommand::test_cmd_process_search_smartscan \n[gw0] [ 75%] PASSED tests/commands/patch.py::PatchCommand::test_cmd_patch_qword \ntests/commands/search_pattern.py::SearchPatternCommand::test_cmd_search_pattern \n[gw1] [ 76%] PASSED tests/commands/pie.py::PieCommand::test_cmd_pie_breakpoint_delete \ntests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode_get_nok \n[gw2] [ 76%] PASSED tests/commands/pattern.py::PatternCommand::test_cmd_pattern_search \ntests/commands/registers.py::RegistersCommand::test_cmd_registers \n[gw3] [ 77%] PASSED tests/commands/process_search.py::ProcessSearchCommand::test_cmd_process_search_smartscan \ntests/commands/process_search.py::ProcessSearchCommand::test_cmd_process_search_wildcart \n[gw0] [ 77%] PASSED tests/commands/search_pattern.py::SearchPatternCommand::test_cmd_search_pattern \ntests/commands/search_pattern.py::SearchPatternCommand::test_cmd_search_pattern_regex \n[gw1] [ 78%] PASSED tests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode_get_nok \ntests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode_get_ok \n[gw2] [ 79%] PASSED tests/commands/registers.py::RegistersCommand::test_cmd_registers \ntests/commands/reset_cache.py::ResetCacheCommand::test_cmd_reset_cache \n[gw3] [ 79%] PASSED tests/commands/process_search.py::ProcessSearchCommand::test_cmd_process_search_wildcart \ntests/commands/process_status.py::ProcessStatusCommand::test_cmd_process_status \n[gw0] [ 80%] PASSED tests/commands/search_pattern.py::SearchPatternCommand::test_cmd_search_pattern_regex \ntests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode \n[gw2] [ 80%] PASSED tests/commands/reset_cache.py::ResetCacheCommand::test_cmd_reset_cache \ntests/commands/scan.py::ScanCommand::test_cmd_scan \n[gw1] [ 81%] PASSED tests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode_get_ok \ntests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode_search \n[gw3] [ 82%] PASSED tests/commands/process_status.py::ProcessStatusCommand::test_cmd_process_status \ntests/commands/skipi.py::SkipiCommand::test_cmd_skipi_no_arg \n[gw0] [ 82%] PASSED tests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode \ntests/commands/skipi.py::SkipiCommand::test_cmd_skipi_two_instructions_from_location \n[gw1] [ 83%] PASSED tests/commands/shellcode.py::ShellcodeCommand::test_cmd_shellcode_search \ntests/commands/skipi.py::SkipiCommand::test_cmd_nop_inactive \n[gw3] [ 83%] PASSED tests/commands/skipi.py::SkipiCommand::test_cmd_skipi_no_arg \ntests/commands/skipi.py::SkipiCommand::test_cmd_skipi_skip_two_instructions \n[gw2] [ 84%] PASSED tests/commands/scan.py::ScanCommand::test_cmd_scan \ntests/commands/stub.py::StubCommand::test_cmd_stub \n[gw0] [ 85%] PASSED tests/commands/skipi.py::SkipiCommand::test_cmd_skipi_two_instructions_from_location \ntests/commands/smart_eval.py::SmartEvalCommand::test_cmd_smart_eval \n[gw1] [ 85%] PASSED 
tests/commands/skipi.py::SkipiCommand::test_cmd_nop_inactive \ntests/commands/theme.py::ThemeCommand::test_cmd_theme \n[gw3] [ 86%] PASSED tests/commands/skipi.py::SkipiCommand::test_cmd_skipi_skip_two_instructions \ntests/commands/trace_run.py::TraceRunCommand::test_cmd_trace_run \n[gw0] [ 86%] PASSED tests/commands/smart_eval.py::SmartEvalCommand::test_cmd_smart_eval \ntests/commands/vmmap.py::VmmapCommand::test_cmd_vmmap \n[gw1] [ 87%] PASSED tests/commands/theme.py::ThemeCommand::test_cmd_theme \ntests/commands/xfiles.py::XfilesCommand::test_cmd_xfiles \n[gw2] [ 88%] PASSED tests/commands/stub.py::StubCommand::test_cmd_stub \ntests/commands/version.py::VersionCommand::test_cmd_version \n[gw3] [ 88%] PASSED tests/commands/trace_run.py::TraceRunCommand::test_cmd_trace_run \ntests/commands/xinfo.py::XinfoCommand::test_cmd_xinfo \n[gw2] [ 89%] PASSED tests/commands/version.py::VersionCommand::test_cmd_version \ntests/commands/xor_memory.py::XorMemoryCommand::test_cmd_xor_memory_patch \n[gw0] [ 89%] PASSED tests/commands/vmmap.py::VmmapCommand::test_cmd_vmmap \ntests/commands/xinfo.py::XinfoCommandClass::test_cmd_xinfo_on_class \n[gw1] [ 90%] PASSED tests/commands/xfiles.py::XfilesCommand::test_cmd_xfiles \ntests/commands/xor_memory.py::XorMemoryCommand::test_cmd_xor_memory_display \n[gw3] [ 91%] PASSED tests/commands/xinfo.py::XinfoCommand::test_cmd_xinfo \ntests/config/__init__.py::TestGefConfigUnit::test_config_hook_validator \n[gw2] [ 91%] PASSED tests/commands/xor_memory.py::XorMemoryCommand::test_cmd_xor_memory_patch \ntests/config/__init__.py::TestGefConfigUnit::test_config_libc_version \n[gw0] [ 92%] PASSED tests/commands/xinfo.py::XinfoCommandClass::test_cmd_xinfo_on_class \ntests/config/__init__.py::TestGefConfigUnit::test_config_show_opcodes_size \n[gw1] [ 92%] PASSED tests/commands/xor_memory.py::XorMemoryCommand::test_cmd_xor_memory_display \ntests/config/__init__.py::TestGefConfigUnit::test_config_type_validator \n[gw3] [ 93%] PASSED tests/config/__init__.py::TestGefConfigUnit::test_config_hook_validator \ntests/functions/elf_sections.py::ElfSectionGdbFunction::test_func_base \n[gw2] [ 94%] PASSED tests/config/__init__.py::TestGefConfigUnit::test_config_libc_version \ntests/functions/elf_sections.py::ElfSectionGdbFunction::test_func_stack \n[gw1] [ 94%] PASSED tests/config/__init__.py::TestGefConfigUnit::test_config_type_validator \ntests/functions/elf_sections.py::ElfSectionGdbFunctionHeap::test_func_got \n[gw0] [ 95%] PASSED tests/config/__init__.py::TestGefConfigUnit::test_config_show_opcodes_size \ntests/functions/elf_sections.py::ElfSectionGdbFunctionBss::test_func_bss \n[gw3] [ 95%] PASSED tests/functions/elf_sections.py::ElfSectionGdbFunction::test_func_base \ntests/functions/elf_sections.py::ElfSectionGdbFunctionHeap::test_func_heap \n[gw2] [ 96%] PASSED tests/functions/elf_sections.py::ElfSectionGdbFunction::test_func_stack \ntests/regressions/gdbserver_connection.py::RegressionGdbserverConnection::test_can_establish_connection_to_gdbserver_again_after_disconnect \n[gw1] [ 97%] PASSED tests/functions/elf_sections.py::ElfSectionGdbFunctionHeap::test_func_got \ntests/regressions/registers_register_order.py::RegressionRegisterOrder::test_registers_show_registers_in_correct_order \n[gw0] [ 97%] PASSED tests/functions/elf_sections.py::ElfSectionGdbFunctionBss::test_func_bss \ntests/regressions/registers_register_order.py::RegressionRegisterOrderNested::test_context_correct_registers_refresh_with_frames \n[gw3] [ 98%] PASSED 
tests/functions/elf_sections.py::ElfSectionGdbFunctionHeap::test_func_heap \n[gw1] [ 98%] PASSED tests/regressions/registers_register_order.py::RegressionRegisterOrder::test_registers_show_registers_in_correct_order \n[gw0] [ 99%] PASSED tests/regressions/registers_register_order.py::RegressionRegisterOrderNested::test_context_correct_registers_refresh_with_frames \n[gw2] [100%] PASSED tests/regressions/gdbserver_connection.py::RegressionGdbserverConnection::test_can_establish_connection_to_gdbserver_again_after_disconnect \n\n=================================== FAILURES ===================================\n/home/runner/.local/lib/python3.8/site-packages/pytest_benchmark/logger.py:46: PytestBenchmarkWarning: Benchmarks are automatically disabled because xdist plugin is active.Benchmarks cannot be performed reliably in a parallelized environment.\n warner(PytestBenchmarkWarning(text))\n____________ GefMemoryApi.test_api_gef_memory_parse_info_proc_maps _____________\n[gw2] linux -- Python 3.8.10 /usr/bin/python3.8\nself = <tests.api.gef_memory.GefMemoryApi testMethod=test_api_gef_memory_parse_info_proc_maps>\n\n def test_api_gef_memory_parse_info_proc_maps(self):\n gdb, gef, root = self._gdb, self._gef, self._conn.root\n gdb.execute(\"start\")\n \n Section = root.eval(\"Section\")\n \n> if self.gdb_version < (11, 0):\nE TypeError: '<' not supported between instances of 'list' and 'tuple'\n\ntests/api/gef_memory.py:71: TypeError\n----------------------------- Captured stdout call -----------------------------\nReading symbols from /tmp/default.out...\n\u001b[32mGEF\u001b[0m for linux ready, type `\u001b[4m\u001b[33mgef\u001b[24m\u001b[0m' to start, `\u001b[4m\u001b[35mgef config\u001b[24m\u001b[0m' to configure\n\u001b[1m\u001b[32m88\u001b[0m commands loaded and \u001b[1m\u001b[34m5\u001b[0m functions added for GDB \u001b[1m\u001b[33m10.2\u001b[0m in 0.00ms using Python engine \u001b[1m\u001b[31m3.8\u001b[0m\n[+] Breaking at '0x1169'\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 registers \u2500\u2500\u2500\u2500\n$rax : 0x0000555555555169 \u2192 <main+0> endbr64 \n$rbx : 0x00005555555551c0 \u2192 <__libc_csu_init+0> endbr64 \n$rcx : 0x00005555555551c0 \u2192 <__libc_csu_init+0> endbr64 \n$rdx : 0x00007fffffffd758 \u2192 0x00007fffffffdcd0 \u2192 \"GITHUB_STATE=/home/runner/work/_temp/_runner_file_[...]\"\n$rsp : 0x00007fffffffd620 \u2192 0x00007ffff7fb62e8 \u2192 0x0000000000000000\n$rbp : 0x00007fffffffd650 \u2192 0x0000000000000000\n$rsi : 0x00007fffffffd748 \u2192 0x00007fffffffdcbf \u2192 \"/tmp/default.out\"\n$rdi : 0x1 \n$rip : 0x0000555555555180 \u2192 <main+23> mov rax, QWORD PTR fs:0x28\n$r8 : 0x0 \n$r9 : 0x00007ffff7fe0d60 \u2192 <_dl_fini+0> endbr64 \n$r10 : 0x00007ffff7ffcf68 \u2192 0x000000006ffffff0\n$r11 : 0x202 \n$r12 : 0x0000555555555080 \u2192 <_start+0> endbr64 \n$r13 : 0x00007fffffffd740 \u2192 0x0000000000000001\n$r14 : 0x0 \n$r15 : 0x0 \n$eflags: [zero carry parity adjust sign trap INTERRUPT direction overflow resume virtualx86 identification]\n$cs: 0x33 $ss: 0x2b $ds: 0x00 $es: 0x00 $fs: 0x00 $gs: 0x00 
\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 stack \u2500\u2500\u2500\u2500\n0x00007fffffffd620\u2502+0x0000: 0x00007ffff7fb62e8 \u2192 0x0000000000000000\t \u2190 $rsp\n0x00007fffffffd628\u2502+0x0008: 0x00007fffffffd758 \u2192 0x00007fffffffdcd0 \u2192 \"GITHUB_STATE=/home/runner/work/_temp/_runner_file_[...]\"\n0x00007fffffffd630\u2502+0x0010: 0x00007fffffffd748 \u2192 0x00007fffffffdcbf \u2192 \"/tmp/default.out\"\n0x00007fffffffd638\u2502+0x0018: 0x0000000155555080\n0x00007fffffffd640\u2502+0x0020: 0x00007fffffffd740 \u2192 0x0000000000000001\n0x00007fffffffd648\u2502+0x0028: 0x0000000000000000\n0x00007fffffffd650\u2502+0x0030: 0x0000000000000000\t \u2190 $rbp\n0x00007fffffffd658\u2502+0x0038: 0x00007ffff7de9083 \u2192 <__libc_start_main+243> mov edi, eax\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 code:x86:64 \u2500\u2500\u2500\u2500\n 0x555555555175 <main+12> mov DWORD PTR [rbp-0x14], edi\n 0x555555555178 <main+15> mov QWORD PTR [rbp-0x20], rsi\n 0x55555555517c <main+19> mov QWORD PTR [rbp-0x28], rdx\n \u2192 0x555555555180 <main+23> mov rax, QWORD PTR fs:0x28\n 0x555555555189 <main+32> mov QWORD PTR [rbp-0x8], rax\n 0x55555555518d <main+36> xor eax, eax\n 0x55555555518f <main+38> lea rdi, [rip+0xe6e] # 0x555555556004\n 0x555555555196 <main+45> call 0x555555555060 <puts@plt>\n 0x55555555519b <main+50> mov eax, 0x0\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 source:default.c+13 \u2500\u2500\u2500\u2500\n 8\t #include <stdlib.h>\n 9\t #include <unistd.h>\n 10\t \n 11\t \n 12\t int main(int argc, char** argv, char** envp)\n \u2192 13\t {\n 14\t printf(\"Hello World!\\n\");\n 15\t return EXIT_SUCCESS;\n 16\t }\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 threads 
\u2500\u2500\u2500\u2500\n[#0] Id 1, Name: \"default.out\", stopped 0x555555555180 in main (), reason: BREAKPOINT\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 trace \u2500\u2500\u2500\u2500\n[#0] 0x555555555180 \u2192 main(argc=0x1, argv=0x7fffffffd748, envp=0x7fffffffd758)\n\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n\n----------------------------- Captured stderr call -----------------------------\nError while writing index for `/tmp/default.out': mkstemp: No such file or directory.\nTERM environment variable not set.\nWARNING:REMOTEDEBUG/7965:keyboard interrupt!\n____ GefMemoryApi.test_api_gef_memory_parse_info_proc_maps_expected_format _____\n[gw2] linux -- Python 3.8.10 /usr/bin/python3.8\nself = <tests.api.gef_memory.GefMemoryApi testMethod=test_api_gef_memory_parse_info_proc_maps_expected_format>\n\n def test_api_gef_memory_parse_info_proc_maps_expected_format(self):\n> if self.gdb_version < (11, 0):\nE TypeError: '<' not supported between instances of 'list' and 'tuple'\n\ntests/api/gef_memory.py:36: TypeError\n----------------------------- Captured stdout call -----------------------------\nReading symbols from /tmp/default.out...\n\u001b[32mGEF\u001b[0m for linux ready, type `\u001b[4m\u001b[33mgef\u001b[24m\u001b[0m' to start, `\u001b[4m\u001b[35mgef config\u001b[24m\u001b[0m' to configure\n\u001b[1m\u001b[32m88\u001b[0m commands loaded and \u001b[1m\u001b[34m5\u001b[0m functions added for GDB \u001b[1m\u001b[33m10.2\u001b[0m in 0.00ms using Python engine \u001b[1m\u001b[31m3.8\u001b[0m\n\n----------------------------- Captured stderr call -----------------------------\nError while writing index for `/tmp/default.out': mkstemp: No such file or directory.\nWARNING:REMOTEDEBUG/29939:keyboard interrupt!\n=========================== short test summary info ============================\nFAILED tests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_parse_info_proc_maps\nFAILED tests/api/gef_memory.py::GefMemoryApi::test_api_gef_memory_parse_info_proc_maps_expected_format\n============= 2 failed, 165 passed, 1 skipped in 60.83s (0:01:00) ==============\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/tests/base.py b/tests/base.py\nindex 1537c53..3d0f3dc 100644\n--- a/tests/base.py\n+++ b/tests/base.py\n@@ -112,4 +112,4 @@ pi start_rpyc_service({self._port})\n def gdb_version(self) -> Tuple[int, int]:\n res = [int(d) for d in re.search(r\"(\\d+)\\D(\\d+)\", self._gdb.VERSION).groups()] \n assert len(res) >= 2\n- return res\n+ 
return tuple(res)\n", "difficulty": 2, "changed_files": ["tests/base.py"], "commit_link": "https://github.com/hugsy/gef/tree/76e35eca93562514943c5842cf2b0b8ec94a4763"}
data/python/7aa2f79.json ADDED
The diff for this file is too large to render. See raw diff
data/python/7e3720f.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 34, "repo_owner": "lightly-ai", "repo_name": "lightly", "head_branch": "master", "workflow_name": "check setup.py", "workflow_filename": "test_setup.yml", "workflow_path": ".github/workflows/test_setup.yml", "contributor": "jameschapman19", "sha_fail": "7e3720f246234572faf7ea093d228d995684f1c8", "sha_success": "b9d0ed9e3ee5fcaa5c41035f0af3dd1f169a1432", "workflow": "name: check setup.py\n\non:\n push:\n paths-ignore:\n - 'docs/**'\n pull_request:\n paths-ignore:\n - 'docs/**'\n workflow_dispatch:\n\njobs:\n test:\n name: Test setup.py\n runs-on: ubuntu-latest\n\n\n steps:\n - name: Checkout Code\n uses: actions/checkout@v3\n - name: Hack to get setup-python to work on nektos/act\n run: |\n if [ ! -f \"/etc/lsb-release\" ] ; then\n echo \"DISTRIB_RELEASE=18.04\" > /etc/lsb-release\n fi\n - name: Set up Python 3.7\n uses: actions/setup-python@v4\n with:\n python-version: 3.7\n - uses: actions/cache@v2\n with:\n path: ${{ env.pythonLocation }}\n key: cache_v2_${{ env.pythonLocation }}-${{ hashFiles('requirements/**') }}\n - name: Install Dependencies and lightly\n run: pip install .\n - name: basic tests of CLI\n run: |\n LIGHTLY_SERVER_LOCATION=\"localhost:-1\"\n lightly-crop --help\n lightly-train --help\n lightly-embed --help\n lightly-magic --help\n lightly-download --help\n lightly-version\n - name: test of CLI on a real dataset\n run: |\n LIGHTLY_SERVER_LOCATION=\"localhost:-1\"\n git clone https://github.com/alexeygrigorev/clothing-dataset-small clothing_dataset_small\n INPUT_DIR_1=\"clothing_dataset_small/test/dress\"\n lightly-train input_dir=$INPUT_DIR_1 trainer.max_epochs=1 loader.num_workers=6\n lightly-embed input_dir=$INPUT_DIR_1\n", "logs": [{"step_name": "Test setup.py/7_basic tests of CLI.txt", "log": "##[group]Run LIGHTLY_SERVER_LOCATION=\"localhost:-1\"\n\u001b[36;1mLIGHTLY_SERVER_LOCATION=\"localhost:-1\"\u001b[0m\n\u001b[36;1mlightly-crop --help\u001b[0m\n\u001b[36;1mlightly-train --help\u001b[0m\n\u001b[36;1mlightly-embed --help\u001b[0m\n\u001b[36;1mlightly-magic --help\u001b[0m\n\u001b[36;1mlightly-download --help\u001b[0m\n\u001b[36;1mlightly-version\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.7.17/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.7.17/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.7.17/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.7.17/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.7.17/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.7.17/x64/lib\n##[endgroup]\nTraceback (most recent call last):\n File \"/opt/hostedtoolcache/Python/3.7.17/x64/bin/lightly-crop\", line 5, in <module>\n from lightly.cli.crop_cli import entry\n File \"/opt/hostedtoolcache/Python/3.7.17/x64/lib/python3.7/site-packages/lightly/cli/__init__.py\", line 12, in <module>\n from lightly.cli.lightly_cli import lightly_cli\n File \"/opt/hostedtoolcache/Python/3.7.17/x64/lib/python3.7/site-packages/lightly/cli/lightly_cli.py\", line 16, in <module>\n from lightly.cli.train_cli import _train_cli\n File \"/opt/hostedtoolcache/Python/3.7.17/x64/lib/python3.7/site-packages/lightly/cli/train_cli.py\", line 31, in <module>\n from lightly.loss import NTXentLoss\n File \"/opt/hostedtoolcache/Python/3.7.17/x64/lib/python3.7/site-packages/lightly/loss/__init__.py\", line 6, in <module>\n from lightly.loss.dcl_loss import DCLLoss, DCLWLoss\n File \"/opt/hostedtoolcache/Python/3.7.17/x64/lib/python3.7/site-packages/lightly/loss/dcl_loss.py\", line 9, in <module>\n from 
lightly.utils import dist\n File \"/opt/hostedtoolcache/Python/3.7.17/x64/lib/python3.7/site-packages/lightly/utils/dist.py\", line 1, in <module>\n from typing import Any, Callable, Literal, Optional, Tuple, TypeVar, Union\nImportError: cannot import name 'Literal' from 'typing' (/opt/hostedtoolcache/Python/3.7.17/x64/lib/python3.7/typing.py)\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/lightly/utils/dist.py b/lightly/utils/dist.py\nindex e56b43a3..7c8c7f77 100644\n--- a/lightly/utils/dist.py\n+++ b/lightly/utils/dist.py\n@@ -1,4 +1,4 @@\n-from typing import Any, Callable, Literal, Optional, Tuple, TypeVar, Union\n+from typing import Any, Callable, Optional, Tuple, TypeVar, Union\n \n import torch\n import torch.distributed as dist\n", "difficulty": 1, "changed_files": ["lightly/utils/dist.py"], "commit_link": "https://github.com/lightly-ai/lightly/tree/7e3720f246234572faf7ea093d228d995684f1c8"}
data/python/7f35134.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 28, "repo_owner": "encode", "repo_name": "httpx", "head_branch": "add-ssl-context-argument", "workflow_name": "Test Suite", "workflow_filename": "test-suite.yml", "workflow_path": ".github/workflows/test-suite.yml", "contributor": "karpetrosyan", "sha_fail": "7f351340260c165e18ccd7c83dc783bb371b3797", "sha_success": "8d5983adaf778096bb95171dcafe858f6c6d51bb", "workflow": "---\nname: Test Suite\n\non:\n push:\n branches: [\"master\"]\n pull_request:\n branches: [\"master\"]\n\njobs:\n tests:\n name: \"Python ${{ matrix.python-version }}\"\n runs-on: \"ubuntu-latest\"\n\n strategy:\n matrix:\n python-version: [\"3.8\", \"3.9\", \"3.10\", \"3.11\", \"3.12\"]\n\n steps:\n - uses: \"actions/checkout@v4\"\n - uses: \"actions/setup-python@v5\"\n with:\n python-version: \"${{ matrix.python-version }}\"\n allow-prereleases: true\n - name: \"Install dependencies\"\n run: \"scripts/install\"\n - name: \"Run linting checks\"\n run: \"scripts/check\"\n - name: \"Build package & docs\"\n run: \"scripts/build\"\n - name: \"Run tests\"\n run: \"scripts/test\"\n - name: \"Enforce coverage\"\n run: \"scripts/coverage\"\n", "logs": [{"step_name": "Python 3.8/8_Enforce coverage.txt", "log": "##[group]Run scripts/coverage\n\u001b[36;1mscripts/coverage\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\n+ coverage report --show-missing --skip-covered --fail-under=100\nName Stmts Miss Cover Missing\n------------------------------------------------\nhttpx/_config.py 133 1 99% 139\n------------------------------------------------\nTOTAL 7570 1 99%\n\n58 files skipped due to complete coverage.\nCoverage failure: total of 99 is less than fail-under=100\n##[error]Process completed with exit code 2.\n"}, {"step_name": "Python 3.9/8_Enforce coverage.txt", "log": "##[group]Run scripts/coverage\n\u001b[36;1mscripts/coverage\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\n+ coverage report --show-missing --skip-covered --fail-under=100\nName Stmts Miss Cover Missing\n------------------------------------------------\nhttpx/_config.py 133 1 99% 139\n------------------------------------------------\nTOTAL 7570 1 99%\n\n58 files skipped due to complete coverage.\nCoverage failure: total of 99 is less than fail-under=100\n##[error]Process completed with exit code 2.\n"}, {"step_name": "Python 3.10/8_Enforce coverage.txt", "log": "##[group]Run scripts/coverage\n\u001b[36;1mscripts/coverage\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.10.13/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n LD_LIBRARY_PATH: 
/opt/hostedtoolcache/Python/3.10.13/x64/lib\n##[endgroup]\n+ coverage report --show-missing --skip-covered --fail-under=100\nName Stmts Miss Cover Missing\n------------------------------------------------\nhttpx/_config.py 133 1 99% 139\n------------------------------------------------\nTOTAL 7577 1 99%\n\n58 files skipped due to complete coverage.\nCoverage failure: total of 99 is less than fail-under=100\n##[error]Process completed with exit code 2.\n"}, {"step_name": "Python 3.11/8_Enforce coverage.txt", "log": "##[group]Run scripts/coverage\n\u001b[36;1mscripts/coverage\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.11.7/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib\n##[endgroup]\n+ coverage report --show-missing --skip-covered --fail-under=100\nName Stmts Miss Cover Missing\n------------------------------------------------\nhttpx/_config.py 133 1 99% 139\n------------------------------------------------\nTOTAL 7577 1 99%\n\n58 files skipped due to complete coverage.\nCoverage failure: total of 99 is less than fail-under=100\n##[error]Process completed with exit code 2.\n"}], "diff": "diff --git a/docs/advanced/ssl.md b/docs/advanced/ssl.md\nindex 1eec32f..2c3e679 100644\n--- a/docs/advanced/ssl.md\n+++ b/docs/advanced/ssl.md\n@@ -14,7 +14,7 @@ You can configure the verification using `httpx.SSLContext()`.\n ```pycon\n >>> ssl_context = httpx.SSLContext()\n >>> ssl_context\n-<SSLContext [verify=True]>\n+SSLContext(verify=True)\n >>> httpx.get(\"https://www.example.com\", ssl_context=ssl_context)\n httpx.ConnectError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: certificate has expired (_ssl.c:997)\n ```\n@@ -24,7 +24,7 @@ For example, you can use this to disable verification completely and allow insec\n ```pycon\n >>> no_verify = httpx.SSLContext(verify=False)\n >>> no_verify\n-<SSLContext [verify=False]>\n+SSLContext(verify=False)\n >>> httpx.get(\"https://expired.badssl.com/\", ssl_context=no_verify)\n <Response [200 OK]>\n ```\ndiff --git a/httpx/_config.py b/httpx/_config.py\nindex 303388d..8cfeea7 100644\n--- a/httpx/_config.py\n+++ b/httpx/_config.py\n@@ -136,7 +136,9 @@ class SSLContext(ssl.SSLContext):\n )\n \n def __repr__(self) -> str:\n- return f\"<SSLContext [verify={self.verify}]>\"\n+ class_name = self.__class__.__name__\n+\n+ return f\"{class_name}(verify={self.verify!r})\"\n \n def __new__(\n cls,\ndiff --git a/tests/test_config.py b/tests/test_config.py\nindex 530b150..b39efa8 100644\n--- a/tests/test_config.py\n+++ b/tests/test_config.py\n@@ -74,6 +74,16 @@ def test_SSLContext_with_get_request(server, cert_pem_file):\n assert response.status_code == 200\n \n \n+def test_SSLContext_repr():\n+ ssl_context = httpx.SSLContext()\n+\n+ assert repr(ssl_context) == \"SSLContext(verify=True)\"\n+\n+ ssl_context = httpx.SSLContext(verify=certifi.where())\n+\n+ assert repr(ssl_context) == \"SSLContext(verify='{}')\".format(certifi.where())\n+\n+\n def test_limits_repr():\n limits = httpx.Limits(max_connections=100)\n expected = (\n", "difficulty": 3, "changed_files": ["docs/advanced/ssl.md", "httpx/_config.py", "tests/test_config.py"], "commit_link": "https://github.com/encode/httpx/tree/7f351340260c165e18ccd7c83dc783bb371b3797"}
data/python/83b5e4b.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 76, "repo_owner": "encode", "repo_name": "httpx", "head_branch": "network-options", "workflow_name": "Test Suite", "workflow_filename": "test-suite.yml", "workflow_path": ".github/workflows/test-suite.yml", "contributor": "encode", "sha_fail": "83b5e4bf130d204fbb25b26a341c62aee4fc2d0f", "sha_success": "913ea35324c99c2052331008ea8a4b8037e5b4cb", "workflow": "---\nname: Test Suite\n\non:\n push:\n branches: [\"master\"]\n pull_request:\n branches: [\"master\"]\n\njobs:\n tests:\n name: \"Python ${{ matrix.python-version }}\"\n runs-on: \"ubuntu-latest\"\n\n strategy:\n matrix:\n python-version: [\"3.8\", \"3.9\", \"3.10\", \"3.11\", \"3.12\"]\n\n steps:\n - uses: \"actions/checkout@v4\"\n - uses: \"actions/setup-python@v5\"\n with:\n python-version: \"${{ matrix.python-version }}\"\n allow-prereleases: true\n - name: \"Install dependencies\"\n run: \"scripts/install\"\n - name: \"Run linting checks\"\n run: \"scripts/check\"\n - name: \"Build package & docs\"\n run: \"scripts/build\"\n - name: \"Run tests\"\n run: \"scripts/test\"\n - name: \"Enforce coverage\"\n run: \"scripts/coverage\"\n", "logs": [{"step_name": "Python 3.8/5_Run linting checks.txt", "log": "##[group]Run scripts/check\n\u001b[36;1mscripts/check\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\n+ ./scripts/sync-version\nCHANGELOG_VERSION: 0.26.0\nVERSION: 0.26.0\n+ ruff format httpx tests --diff\n60 files already formatted\n+ mypy httpx tests\nSuccess: no issues found in 60 source files\n+ ruff check httpx tests\nhttpx/_config.py:1:1: I001 [*] Import block is un-sorted or un-formatted\nhttpx/_transports/default.py:26:1: I001 [*] Import block is un-sorted or un-formatted\nFound 2 errors.\n[*] 2 fixable with the `--fix` option.\n##[error]Process completed with exit code 1.\n"}, {"step_name": "Python 3.9/5_Run linting checks.txt", "log": "##[group]Run scripts/check\n\u001b[36;1mscripts/check\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\n+ ./scripts/sync-version\nCHANGELOG_VERSION: 0.26.0\nVERSION: 0.26.0\n+ ruff format httpx tests --diff\n60 files already formatted\n+ mypy httpx tests\nSuccess: no issues found in 60 source files\n+ ruff check httpx tests\nhttpx/_config.py:1:1: I001 [*] Import block is un-sorted or un-formatted\nhttpx/_transports/default.py:26:1: I001 [*] Import block is un-sorted or un-formatted\nFound 2 errors.\n[*] 2 fixable with the `--fix` option.\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/httpx/_config.py b/httpx/_config.py\nindex 69c3c6f..1af8a45 100644\n--- a/httpx/_config.py\n+++ b/httpx/_config.py\n@@ -12,7 +12,6 @@ from ._types import CertTypes, HeaderTypes, TimeoutTypes, URLTypes, VerifyTypes\n from ._urls import URL\n from ._utils import get_ca_bundle_from_env\n \n-\n SOCKET_OPTION = 
typing.Union[\n typing.Tuple[int, int, int],\n typing.Tuple[int, int, typing.Union[bytes, bytearray]],\ndiff --git a/httpx/_transports/default.py b/httpx/_transports/default.py\nindex 7802026..0829b57 100644\n--- a/httpx/_transports/default.py\n+++ b/httpx/_transports/default.py\n@@ -32,9 +32,9 @@ import httpcore\n from .._config import (\n DEFAULT_LIMITS,\n DEFAULT_NETWORK_OPTIONS,\n- Proxy,\n Limits,\n NetworkOptions,\n+ Proxy,\n create_ssl_context,\n )\n from .._exceptions import (\n", "difficulty": 0, "changed_files": ["httpx/_config.py", "httpx/_transports/default.py"], "commit_link": "https://github.com/encode/httpx/tree/83b5e4bf130d204fbb25b26a341c62aee4fc2d0f"}
data/python/897a5de.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 58, "repo_owner": "encode", "repo_name": "httpx", "head_branch": "add-ssl-context-argument", "workflow_name": "Test Suite", "workflow_filename": "test-suite.yml", "workflow_path": ".github/workflows/test-suite.yml", "contributor": "karpetrosyan", "sha_fail": "897a5deb406b53ea2f4675cdf0c2f1fa93fc6238", "sha_success": "88f2ab747c4630e1c8c4454974e4a6e29757e55a", "workflow": "---\nname: Test Suite\n\non:\n push:\n branches: [\"master\"]\n pull_request:\n branches: [\"master\"]\n\njobs:\n tests:\n name: \"Python ${{ matrix.python-version }}\"\n runs-on: \"ubuntu-latest\"\n\n strategy:\n matrix:\n python-version: [\"3.8\", \"3.9\", \"3.10\", \"3.11\", \"3.12\"]\n\n steps:\n - uses: \"actions/checkout@v4\"\n - uses: \"actions/setup-python@v4\"\n with:\n python-version: \"${{ matrix.python-version }}\"\n allow-prereleases: true\n - name: \"Install dependencies\"\n run: \"scripts/install\"\n - name: \"Run linting checks\"\n run: \"scripts/check\"\n - name: \"Build package & docs\"\n run: \"scripts/build\"\n - name: \"Run tests\"\n run: \"scripts/test\"\n - name: \"Enforce coverage\"\n run: \"scripts/coverage\"\n", "logs": [{"step_name": "Python 3.8/5_Run linting checks.txt", "log": "##[group]Run scripts/check\n\u001b[36;1mscripts/check\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\n+ ./scripts/sync-version\nCHANGELOG_VERSION: 0.26.0\nVERSION: 0.26.0\n+ ruff format httpx tests --diff\n60 files left unchanged\n+ mypy httpx tests\nSuccess: no issues found in 60 source files\n+ ruff check httpx tests\nhttpx/__init__.py:1:1: I001 [*] Import block is un-sorted or un-formatted\nFound 1 error.\n[*] 1 fixable with the `--fix` option.\n##[error]Process completed with exit code 1.\n"}, {"step_name": "Python 3.9/5_Run linting checks.txt", "log": "##[group]Run scripts/check\n\u001b[36;1mscripts/check\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\n+ ./scripts/sync-version\nCHANGELOG_VERSION: 0.26.0\nVERSION: 0.26.0\n+ ruff format httpx tests --diff\n60 files left unchanged\n+ mypy httpx tests\nSuccess: no issues found in 60 source files\n+ ruff check httpx tests\nhttpx/__init__.py:1:1: I001 [*] Import block is un-sorted or un-formatted\nFound 1 error.\n[*] 1 fixable with the `--fix` option.\n##[error]Process completed with exit code 1.\n"}, {"step_name": "Python 3.10/5_Run linting checks.txt", "log": "##[group]Run scripts/check\n\u001b[36;1mscripts/check\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.10.13/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n LD_LIBRARY_PATH: 
/opt/hostedtoolcache/Python/3.10.13/x64/lib\n##[endgroup]\n+ ./scripts/sync-version\nCHANGELOG_VERSION: 0.26.0\n+ ruff format httpx tests --diff\nVERSION: 0.26.0\n60 files left unchanged\n+ mypy httpx tests\nSuccess: no issues found in 60 source files\n+ ruff check httpx tests\nhttpx/__init__.py:1:1: I001 [*] Import block is un-sorted or un-formatted\nFound 1 error.\n[*] 1 fixable with the `--fix` option.\n##[error]Process completed with exit code 1.\n"}, {"step_name": "Python 3.11/5_Run linting checks.txt", "log": "##[group]Run scripts/check\n\u001b[36;1mscripts/check\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.11.7/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib\n##[endgroup]\n+ ./scripts/sync-version\nCHANGELOG_VERSION: 0.26.0\nVERSION: 0.26.0\n+ ruff format httpx tests --diff\n60 files left unchanged\n+ mypy httpx tests\nSuccess: no issues found in 60 source files\n+ ruff check httpx tests\nhttpx/__init__.py:1:1: I001 [*] Import block is un-sorted or un-formatted\nFound 1 error.\n[*] 1 fixable with the `--fix` option.\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/httpx/__init__.py b/httpx/__init__.py\nindex bfb1867..23b9f31 100644\n--- a/httpx/__init__.py\n+++ b/httpx/__init__.py\n@@ -2,7 +2,7 @@ from .__version__ import __description__, __title__, __version__\n from ._api import delete, get, head, options, patch, post, put, request, stream\n from ._auth import Auth, BasicAuth, DigestAuth, NetRCAuth\n from ._client import USE_CLIENT_DEFAULT, AsyncClient, Client\n-from ._config import Limits, Proxy, Timeout, SSLContext\n+from ._config import Limits, Proxy, SSLContext, Timeout\n from ._content import ByteStream\n from ._exceptions import (\n CloseError,\n", "difficulty": 0, "changed_files": ["httpx/__init__.py"], "commit_link": "https://github.com/encode/httpx/tree/897a5deb406b53ea2f4675cdf0c2f1fa93fc6238"}
data/python/903a05c.json ADDED
The diff for this file is too large to render. See raw diff
data/python/9261583.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 36, "repo_owner": "lightly-ai", "repo_name": "lightly", "head_branch": "master", "workflow_name": "Code Format Check", "workflow_filename": "test_code_format.yml", "workflow_path": ".github/workflows/test_code_format.yml", "contributor": "jameschapman19", "sha_fail": "9261583aece340e8b1ee2f06c68f017ce8da468c", "sha_success": "e8e30547893c2979e648108284c17d8ca8ec2995", "workflow": "name: Code Format Check\n\non:\n push:\n pull_request:\n workflow_dispatch:\n\njobs:\n test:\n name: Check\n runs-on: ubuntu-latest\n steps:\n - name: Checkout Code\n uses: actions/checkout@v3\n - name: Hack to get setup-python to work on nektos/act\n run: |\n if [ ! -f \"/etc/lsb-release\" ] ; then\n echo \"DISTRIB_RELEASE=18.04\" > /etc/lsb-release\n fi\n - name: Set up Python\n uses: actions/setup-python@v4\n with:\n python-version: \"3.7\"\n - uses: actions/cache@v2\n with:\n path: ${{ env.pythonLocation }}\n key: cache_v2_${{ env.pythonLocation }}-${{ hashFiles('requirements/**') }}\n - name: Install Dependencies and lightly\n run: pip install -e '.[all]'\n - name: Run Format Check\n run: |\n make format-check\n - name: Run Type Check\n run: |\n make type-check\n", "logs": [{"step_name": "Check/8_Run Type Check.txt", "log": "##[group]Run make type-check\n\u001b[36;1mmake type-check\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.7.17/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.7.17/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.7.17/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.7.17/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.7.17/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.7.17/x64/lib\n##[endgroup]\nmypy lightly tests\nlightly/loss/ssley_loss.py:6: note: In module imported here:\nlightly/utils/dist.py: note: In class \"GatherLayer\":\nlightly/utils/dist.py:19: error: Signature of \"forward\" incompatible with supertype \"Function\" [override]\nlightly/utils/dist.py:19: note: Error code \"override\" not covered by \"type: ignore\" comment\nlightly/utils/dist.py:19: note: Superclass:\nlightly/utils/dist.py:19: note: @staticmethod\nlightly/utils/dist.py:19: note: def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any\nlightly/utils/dist.py:19: note: Subclass:\nlightly/utils/dist.py:19: note: @staticmethod\nlightly/utils/dist.py:19: note: def forward(ctx: Any, input: Tensor) -> tuple[Tensor, ...]\nFound 1 error in 1 file (checked 362 source files)\nmake: *** [Makefile:68: type-check] Error 1\n##[error]Process completed with exit code 2.\n"}], "diff": "diff --git a/lightly/utils/dist.py b/lightly/utils/dist.py\nindex f44df6b9..88407556 100644\n--- a/lightly/utils/dist.py\n+++ b/lightly/utils/dist.py\n@@ -14,9 +14,11 @@ class GatherLayer(Function):\n \n \"\"\"\n \n- # Type ignore is required because superclass uses Any type for ctx.\n+ # Type ignore misc is required because the superclass uses Any type for ctx.\n+ # Type ignore override is required because the superclass has a different signature\n+ # for forward.\n @staticmethod\n- def forward(ctx: Any, input: Tensor) -> Tuple[Tensor, ...]: # type: ignore[misc]\n+ def forward(ctx: Any, input: Tensor) -> Tuple[Tensor, ...]: # type: ignore[misc, override]\n ctx.save_for_backward(input)\n output = [torch.empty_like(input) for _ in range(dist.get_world_size())]\n dist.all_gather(output, input)\n", "difficulty": 1, "changed_files": ["lightly/utils/dist.py"], "commit_link": 
"https://github.com/lightly-ai/lightly/tree/9261583aece340e8b1ee2f06c68f017ce8da468c"}
data/python/92937f3.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 3, "repo_owner": "pymc-devs", "repo_name": "pymc", "head_branch": "ruff_linter", "workflow_name": "pre-commit", "workflow_filename": "pre-commit.yml", "workflow_path": ".github/workflows/pre-commit.yml", "contributor": "juanitorduz", "sha_fail": "92937f3e3c898a7c90b04b04341d4f1b75e275cf", "sha_success": "2b3a34a0527b4258f6f18f0187e9116edf412f0e", "workflow": "name: pre-commit\n\non:\n pull_request:\n push:\n branches: [main]\n\njobs:\n pre-commit:\n runs-on: ubuntu-latest\n env:\n SKIP: no-commit-to-branch\n steps:\n - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11\n - uses: actions/setup-python@v5\n with:\n python-version: \"3.9\" # Run pre-commit on oldest supported Python version\n - uses: pre-commit/[email protected]\n mypy:\n runs-on: ubuntu-latest\n defaults:\n run:\n shell: bash -l {0}\n steps:\n - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11\n - name: Cache conda\n uses: actions/cache@v3\n env:\n # Increase this value to reset cache if environment-test.yml has not changed\n CACHE_NUMBER: 0\n with:\n path: ~/conda_pkgs_dir\n key: ${{ runner.os }}-py39-conda-${{ env.CACHE_NUMBER }}-${{\n hashFiles('conda-envs/environment-test.yml') }}\n - name: Cache multiple paths\n uses: actions/cache@v3\n env:\n # Increase this value to reset cache if requirements.txt has not changed\n CACHE_NUMBER: 0\n with:\n path: |\n ~/.cache/pip\n $RUNNER_TOOL_CACHE/Python/*\n ~\\AppData\\Local\\pip\\Cache\n key: ${{ runner.os }}-build-${{ matrix.python-version }}-${{ env.CACHE_NUMBER }}-${{\n hashFiles('requirements.txt') }}\n - uses: conda-incubator/setup-miniconda@v2\n with:\n miniforge-variant: Mambaforge\n miniforge-version: latest\n mamba-version: \"*\"\n activate-environment: pymc-test\n channel-priority: strict\n environment-file: conda-envs/environment-test.yml\n python-version: \"3.9\" # Run pre-commit on oldest supported Python version\n use-mamba: true\n use-only-tar-bz2: false # IMPORTANT: This may break caching of conda packages! 
See https://github.com/conda-incubator/setup-miniconda/issues/267\n - name: Install-pymc and mypy dependencies\n run: |\n conda activate pymc-test\n pip install -e .\n pip install --pre -U polyagamma\n python --version\n - name: Run mypy\n run: |\n conda activate pymc-test\n python ./scripts/run_mypy.py --verbose\n", "logs": [{"step_name": "pre-commit/4_Run [email protected]", "log": "##[group]Run pre-commit/[email protected]\nwith:\n extra_args: --all-files\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\n##[group]Run python -m pip install pre-commit\n\u001b[36;1mpython -m pip install pre-commit\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\nCollecting pre-commit\n Downloading pre_commit-3.6.0-py2.py3-none-any.whl.metadata (1.3 kB)\nCollecting cfgv>=2.0.0 (from pre-commit)\n Downloading cfgv-3.4.0-py2.py3-none-any.whl.metadata (8.5 kB)\nCollecting identify>=1.0.0 (from pre-commit)\n Downloading identify-2.5.33-py2.py3-none-any.whl.metadata (4.4 kB)\nCollecting nodeenv>=0.11.1 (from pre-commit)\n Downloading nodeenv-1.8.0-py2.py3-none-any.whl.metadata (21 kB)\nCollecting pyyaml>=5.1 (from pre-commit)\n Downloading PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.1 kB)\nCollecting virtualenv>=20.10.0 (from pre-commit)\n Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nRequirement already satisfied: setuptools in /opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages (from nodeenv>=0.11.1->pre-commit) (58.1.0)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.10.0->pre-commit)\n Downloading distlib-0.3.8-py2.py3-none-any.whl.metadata (5.1 kB)\nCollecting filelock<4,>=3.12.2 (from virtualenv>=20.10.0->pre-commit)\n Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting platformdirs<5,>=3.9.1 (from virtualenv>=20.10.0->pre-commit)\n Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nDownloading pre_commit-3.6.0-py2.py3-none-any.whl (204 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 204.0/204.0 kB 15.9 MB/s eta 0:00:00\nDownloading cfgv-3.4.0-py2.py3-none-any.whl (7.2 kB)\nDownloading identify-2.5.33-py2.py3-none-any.whl (98 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 98.9/98.9 kB 27.2 MB/s eta 0:00:00\nDownloading nodeenv-1.8.0-py2.py3-none-any.whl (22 kB)\nDownloading 
PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (738 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 738.9/738.9 kB 52.7 MB/s eta 0:00:00\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 108.7 MB/s eta 0:00:00\nDownloading distlib-0.3.8-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 69.4 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nInstalling collected packages: distlib, pyyaml, platformdirs, nodeenv, identify, filelock, cfgv, virtualenv, pre-commit\nSuccessfully installed cfgv-3.4.0 distlib-0.3.8 filelock-3.13.1 identify-2.5.33 nodeenv-1.8.0 platformdirs-4.1.0 pre-commit-3.6.0 pyyaml-6.0.1 virtualenv-20.25.0\n\n[notice] A new release of pip is available: 23.0.1 -> 23.3.2\n[notice] To update, run: pip install --upgrade pip\n##[group]Run python -m pip freeze --local\n\u001b[36;1mpython -m pip freeze --local\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\ncfgv==3.4.0\ndistlib==0.3.8\nfilelock==3.13.1\nidentify==2.5.33\nnodeenv==1.8.0\nplatformdirs==4.1.0\npre-commit==3.6.0\nPyYAML==6.0.1\nvirtualenv==20.25.0\n##[group]Run actions/cache@v3\nwith:\n path: ~/.cache/pre-commit\n key: pre-commit-3|/opt/hostedtoolcache/Python/3.9.18/x64|b69a9ccf7aebb98803ff18cb769a19428ca4b15f855ee813cd0630df2c6b60db\n enableCrossOsArchive: false\n fail-on-cache-miss: false\n lookup-only: false\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\nCache not found for input keys: pre-commit-3|/opt/hostedtoolcache/Python/3.9.18/x64|b69a9ccf7aebb98803ff18cb769a19428ca4b15f855ee813cd0630df2c6b60db\n##[group]Run pre-commit run --show-diff-on-failure --color=always --all-files\n\u001b[36;1mpre-commit run --show-diff-on-failure --color=always --all-files\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n 
Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\n[INFO]\u001b[m Initializing environment for https://github.com/pre-commit/pre-commit-hooks.\n[INFO]\u001b[m Initializing environment for https://github.com/lucianopaz/head_of_apache.\n[INFO]\u001b[m Initializing environment for https://github.com/astral-sh/ruff-pre-commit.\n[INFO]\u001b[m Initializing environment for https://github.com/PyCQA/pydocstyle.\n[INFO]\u001b[m Initializing environment for https://github.com/MarcoGorelli/madforhooks.\n[INFO]\u001b[m Initializing environment for local:pandas,pyyaml.\n[INFO]\u001b[m Initializing environment for local:pyyaml.\n[INFO]\u001b[m Installing environment for https://github.com/pre-commit/pre-commit-hooks.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\n[INFO]\u001b[m Installing environment for https://github.com/lucianopaz/head_of_apache.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\n[INFO]\u001b[m Installing environment for https://github.com/astral-sh/ruff-pre-commit.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\n[INFO]\u001b[m Installing environment for https://github.com/PyCQA/pydocstyle.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\n[INFO]\u001b[m Installing environment for https://github.com/MarcoGorelli/madforhooks.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\n[INFO]\u001b[m Installing environment for local.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\n[INFO]\u001b[m Installing environment for local.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\ncheck for merge conflicts............................................................\u001b[42mPassed\u001b[m\ncheck toml...........................................................................\u001b[42mPassed\u001b[m\ncheck yaml...........................................................................\u001b[42mPassed\u001b[m\ndebug statements (python)............................................................\u001b[42mPassed\u001b[m\nfix end of files.....................................................................\u001b[42mPassed\u001b[m\ndon't commit to branch..............................................................\u001b[43;30mSkipped\u001b[m\nfix requirements.txt.................................................................\u001b[42mPassed\u001b[m\ntrim trailing whitespace.............................................................\u001b[42mPassed\u001b[m\nApply Apache 2.0 License.............................................................\u001b[42mPassed\u001b[m\nruff.................................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: ruff\u001b[m\n\u001b[2m- exit code: 1\u001b[m\n\u001b[2m- files were modified by this hook\u001b[m\n\n\u001b[1mpymc/_version.py\u001b[0m\u001b[36m:\u001b[0m157\u001b[36m:\u001b[0m13\u001b[36m:\u001b[0m \u001b[1;31mUP031\u001b[0m Use format specifiers instead of percent format\n\u001b[1;38;5;12m |\u001b[0m\n\u001b[1;38;5;12m155 |\u001b[0m if 
verbose:\n\u001b[1;38;5;12m156 |\u001b[0m print(\n\u001b[1;38;5;12m157 |\u001b[0m \"Tried directories %s but none started with prefix %s\"\n\u001b[1;38;5;12m |\u001b[0m \u001b[1;38;5;9m_____________^\u001b[0m\n\u001b[1;38;5;12m158 |\u001b[0m \u001b[1;38;5;9m|\u001b[0m % (str(rootdirs), parentdir_prefix)\n\u001b[1;38;5;12m |\u001b[0m \u001b[1;38;5;9m|\u001b[0m\u001b[1;38;5;9m_______________________________________________^\u001b[0m \u001b[1;38;5;9mUP031\u001b[0m\n\u001b[1;38;5;12m159 |\u001b[0m )\n\u001b[1;38;5;12m160 |\u001b[0m raise NotThisMethod(\"rootdir doesn't start with parentdir_prefix\")\n\u001b[1;38;5;12m |\u001b[0m\n \u001b[1;38;5;12m=\u001b[0m \u001b[1;38;5;14mhelp\u001b[0m: Replace with format specifiers\n\n\u001b[1mpymc/step_methods/hmc/integration.py\u001b[0m\u001b[36m:\u001b[0m47\u001b[36m:\u001b[0m17\u001b[36m:\u001b[0m \u001b[1;31mUP031\u001b[0m Use format specifiers instead of percent format\n\u001b[1;38;5;12m |\u001b[0m\n\u001b[1;38;5;12m45 |\u001b[0m if self._potential.dtype != self._dtype:\n\u001b[1;38;5;12m46 |\u001b[0m raise ValueError(\n\u001b[1;38;5;12m47 |\u001b[0m \"dtypes of potential (%s) and logp function (%s)\"\n\u001b[1;38;5;12m |\u001b[0m \u001b[1;38;5;9m_________________^\u001b[0m\n\u001b[1;38;5;12m48 |\u001b[0m \u001b[1;38;5;9m|\u001b[0m \"don't match.\" % (self._potential.dtype, self._dtype)\n\u001b[1;38;5;12m |\u001b[0m \u001b[1;38;5;9m|\u001b[0m\u001b[1;38;5;9m_____________________________________________________________________^\u001b[0m \u001b[1;38;5;9mUP031\u001b[0m\n\u001b[1;38;5;12m49 |\u001b[0m )\n\u001b[1;38;5;12m |\u001b[0m\n \u001b[1;38;5;12m=\u001b[0m \u001b[1;38;5;14mhelp\u001b[0m: Replace with format specifiers\n\n\u001b[1mpymc/variational/opvi.py\u001b[0m\u001b[36m:\u001b[0m461\u001b[36m:\u001b[0m17\u001b[36m:\u001b[0m \u001b[1;31mUP031\u001b[0m Use format specifiers instead of percent format\n\u001b[1;38;5;12m |\u001b[0m\n\u001b[1;38;5;12m459 |\u001b[0m if self.require_logq and not approx.has_logq:\n\u001b[1;38;5;12m460 |\u001b[0m raise ExplicitInferenceError(\n\u001b[1;38;5;12m461 |\u001b[0m \"%s requires logq, but %s does not implement it\"\n\u001b[1;38;5;12m |\u001b[0m \u001b[1;38;5;9m_________________^\u001b[0m\n\u001b[1;38;5;12m462 |\u001b[0m \u001b[1;38;5;9m|\u001b[0m \"please change inference method\" % (self, approx)\n\u001b[1;38;5;12m |\u001b[0m \u001b[1;38;5;9m|\u001b[0m\u001b[1;38;5;9m_________________________________________________________________^\u001b[0m \u001b[1;38;5;9mUP031\u001b[0m\n\u001b[1;38;5;12m463 |\u001b[0m )\n\u001b[1;38;5;12m |\u001b[0m\n \u001b[1;38;5;12m=\u001b[0m \u001b[1;38;5;14mhelp\u001b[0m: Replace with format specifiers\n\n\u001b[1mpymc/variational/opvi.py\u001b[0m\u001b[36m:\u001b[0m513\u001b[36m:\u001b[0m16\u001b[36m:\u001b[0m \u001b[1;31mUP031\u001b[0m Use format specifiers instead of percent format\n\u001b[1;38;5;12m |\u001b[0m\n\u001b[1;38;5;12m512 |\u001b[0m def __str__(self): # pragma: no cover\n\u001b[1;38;5;12m513 |\u001b[0m return \"%(op)s[%(ap)s]\" % dict(\n\u001b[1;38;5;12m |\u001b[0m \u001b[1;38;5;9m________________^\u001b[0m\n\u001b[1;38;5;12m514 |\u001b[0m \u001b[1;38;5;9m|\u001b[0m op=self.__class__.__name__, ap=self.approx.__class__.__name__\n\u001b[1;38;5;12m515 |\u001b[0m \u001b[1;38;5;9m|\u001b[0m )\n\u001b[1;38;5;12m |\u001b[0m \u001b[1;38;5;9m|\u001b[0m\u001b[1;38;5;9m_________^\u001b[0m \u001b[1;38;5;9mUP031\u001b[0m\n\u001b[1;38;5;12m |\u001b[0m\n \u001b[1;38;5;12m=\u001b[0m \u001b[1;38;5;14mhelp\u001b[0m: Replace with format 
specifiers\n\n\u001b[1mversioneer.py\u001b[0m\u001b[36m:\u001b[0m328\u001b[36m:\u001b[0m17\u001b[36m:\u001b[0m \u001b[1;31mUP031\u001b[0m Use format specifiers instead of percent format\n\u001b[1;38;5;12m |\u001b[0m\n\u001b[1;38;5;12m326 |\u001b[0m if me_dir != vsr_dir:\n\u001b[1;38;5;12m327 |\u001b[0m print(\n\u001b[1;38;5;12m328 |\u001b[0m \"Warning: build in %s is using versioneer.py from %s\"\n\u001b[1;38;5;12m |\u001b[0m \u001b[1;38;5;9m_________________^\u001b[0m\n\u001b[1;38;5;12m329 |\u001b[0m \u001b[1;38;5;9m|\u001b[0m % (os.path.dirname(my_path), versioneer_py)\n\u001b[1;38;5;12m |\u001b[0m \u001b[1;38;5;9m|\u001b[0m\u001b[1;38;5;9m___________________________________________________________^\u001b[0m \u001b[1;38;5;9mUP031\u001b[0m\n\u001b[1;38;5;12m330 |\u001b[0m )\n\u001b[1;38;5;12m331 |\u001b[0m except NameError:\n\u001b[1;38;5;12m |\u001b[0m\n \u001b[1;38;5;12m=\u001b[0m \u001b[1;38;5;14mhelp\u001b[0m: Replace with format specifiers\n\n\u001b[1mversioneer.py\u001b[0m\u001b[36m:\u001b[0m1388\u001b[36m:\u001b[0m13\u001b[36m:\u001b[0m \u001b[1;31mUP031\u001b[0m Use format specifiers instead of percent format\n\u001b[1;38;5;12m |\u001b[0m\n\u001b[1;38;5;12m1386 |\u001b[0m if verbose:\n\u001b[1;38;5;12m1387 |\u001b[0m print(\n\u001b[1;38;5;12m1388 |\u001b[0m \"Tried directories %s but none started with prefix %s\"\n\u001b[1;38;5;12m |\u001b[0m \u001b[1;38;5;9m_____________^\u001b[0m\n\u001b[1;38;5;12m1389 |\u001b[0m \u001b[1;38;5;9m|\u001b[0m % (str(rootdirs), parentdir_prefix)\n\u001b[1;38;5;12m |\u001b[0m \u001b[1;38;5;9m|\u001b[0m\u001b[1;38;5;9m_______________________________________________^\u001b[0m \u001b[1;38;5;9mUP031\u001b[0m\n\u001b[1;38;5;12m1390 |\u001b[0m )\n\u001b[1;38;5;12m1391 |\u001b[0m raise NotThisMethod(\"rootdir doesn't start with parentdir_prefix\")\n\u001b[1;38;5;12m |\u001b[0m\n \u001b[1;38;5;12m=\u001b[0m \u001b[1;38;5;14mhelp\u001b[0m: Replace with format specifiers\n\nFound 14 errors (8 fixed, 6 remaining).\nNo fixes available (6 hidden fixes can be enabled with the `--unsafe-fixes` option).\n\nruff-format..........................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: ruff-format\u001b[m\n\u001b[2m- files were modified by this hook\u001b[m\n\n2 files reformatted, 206 files left unchanged\n\npydocstyle...........................................................................\u001b[42mPassed\u001b[m\nDisallow print statements............................................................\u001b[42mPassed\u001b[m\nCheck no tests are ignored...........................................................\u001b[42mPassed\u001b[m\nGenerate pip dependency from conda...................................................\u001b[42mPassed\u001b[m\nNo relative imports..................................................................\u001b[42mPassed\u001b[m\nCheck no links that should be cross-references are in the docs.......................\u001b[42mPassed\u001b[m\npre-commit hook(s) made changes.\nIf you are seeing this message in CI, reproduce locally with: `pre-commit run --all-files`.\nTo run `pre-commit` as part of git workflow, use `pre-commit install`.\nAll changes made by hooks:\n\u001b[1mdiff --git a/pymc/gp/util.py b/pymc/gp/util.py\u001b[m\n\u001b[1mindex 39eb3b6..3f829ab 100644\u001b[m\n\u001b[1m--- a/pymc/gp/util.py\u001b[m\n\u001b[1m+++ b/pymc/gp/util.py\u001b[m\n\u001b[36m@@ -120,7 +120,7 @@\u001b[m \u001b[mdef kmeans_inducing_points(n_inducing, X, **kmeans_kwargs):\u001b[m\n \"To use K-means 
initialization, \"\u001b[m\n \"please provide X as a type that \"\u001b[m\n \"can be cast to np.ndarray, instead \"\u001b[m\n\u001b[31m- \"of {}\".format(type(X))\u001b[m\n\u001b[32m+\u001b[m\u001b[32m f\"of {type(X)}\"\u001b[m\n )\u001b[m\n scaling = np.std(X, 0)\u001b[m\n # if std of a column is very small (zero), don't normalize that column\u001b[m\n\u001b[1mdiff --git a/pymc/sampling/population.py b/pymc/sampling/population.py\u001b[m\n\u001b[1mindex 6dbe5cf..5ed98c0 100644\u001b[m\n\u001b[1m--- a/pymc/sampling/population.py\u001b[m\n\u001b[1m+++ b/pymc/sampling/population.py\u001b[m\n\u001b[36m@@ -135,7 +135,7 @@\u001b[m \u001b[mdef warn_population_size(\u001b[m\n if has_demcmc and chains <= initial_point_model_size:\u001b[m\n warnings.warn(\u001b[m\n \"DEMetropolis should be used with more chains than dimensions! \"\u001b[m\n\u001b[31m- \"(The model has {} dimensions.)\".format(initial_point_model_size),\u001b[m\n\u001b[32m+\u001b[m\u001b[32m f\"(The model has {initial_point_model_size} dimensions.)\",\u001b[m\n UserWarning,\u001b[m\n stacklevel=2,\u001b[m\n )\u001b[m\n\u001b[1mdiff --git a/pymc/step_methods/arraystep.py b/pymc/step_methods/arraystep.py\u001b[m\n\u001b[1mindex 4c945ab..35f1443 100644\u001b[m\n\u001b[1m--- a/pymc/step_methods/arraystep.py\u001b[m\n\u001b[1m+++ b/pymc/step_methods/arraystep.py\u001b[m\n\u001b[36m@@ -146,10 +146,8 @@\u001b[m \u001b[mclass PopulationArrayStepShared(ArrayStepShared):\u001b[m\n self.other_chains = [c for c in range(len(population)) if c != chain_index]\u001b[m\n if not len(self.other_chains) > 1:\u001b[m\n raise ValueError(\u001b[m\n\u001b[31m- \"Population is just {} + {}. \"\u001b[m\n\u001b[31m- \"This is too small and the error should have been raised earlier.\".format(\u001b[m\n\u001b[31m- self.this_chain, self.other_chains\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m f\"Population is just {self.this_chain} + {self.other_chains}. 
\"\u001b[m\n\u001b[32m+\u001b[m\u001b[32m \"This is too small and the error should have been raised earlier.\"\u001b[m\n )\u001b[m\n return\u001b[m\n \u001b[m\n\u001b[1mdiff --git a/pymc/variational/inference.py b/pymc/variational/inference.py\u001b[m\n\u001b[1mindex ce3bd02..6ee5815 100644\u001b[m\n\u001b[1m--- a/pymc/variational/inference.py\u001b[m\n\u001b[1m+++ b/pymc/variational/inference.py\u001b[m\n\u001b[36m@@ -244,17 +244,11 @@\u001b[m \u001b[mclass Inference:\u001b[m\n if isinstance(e, StopIteration):\u001b[m\n logger.info(str(e))\u001b[m\n if n < 10:\u001b[m\n\u001b[31m- logger.info(\u001b[m\n\u001b[31m- \"Interrupted at {:,d} [{:.0f}%]: Loss = {:,.5g}\".format(\u001b[m\n\u001b[31m- i, 100 * i // n, scores[i]\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m logger.info(f\"Interrupted at {i:,d} [{100 * i // n:.0f}%]: Loss = {scores[i]:,.5g}\")\u001b[m\n else:\u001b[m\n avg_loss = _infmean(scores[min(0, i - 1000) : i + 1])\u001b[m\n logger.info(\u001b[m\n\u001b[31m- \"Interrupted at {:,d} [{:.0f}%]: Average Loss = {:,.5g}\".format(\u001b[m\n\u001b[31m- i, 100 * i // n, avg_loss\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m f\"Interrupted at {i:,d} [{100 * i // n:.0f}%]: Average Loss = {avg_loss:,.5g}\"\u001b[m\n )\u001b[m\n else:\u001b[m\n if n == 0:\u001b[m\n\u001b[1mdiff --git a/pymc/variational/opvi.py b/pymc/variational/opvi.py\u001b[m\n\u001b[1mindex bd1874f..5d70575 100644\u001b[m\n\u001b[1m--- a/pymc/variational/opvi.py\u001b[m\n\u001b[1m+++ b/pymc/variational/opvi.py\u001b[m\n\u001b[36m@@ -701,8 +701,8 @@\u001b[m \u001b[mclass Group(WithMemoization):\u001b[m\n def group_for_params(cls, params):\u001b[m\n if frozenset(params) not in cls.__param_registry:\u001b[m\n raise KeyError(\u001b[m\n\u001b[31m- \"No such group for the following params: {!r}, \"\u001b[m\n\u001b[31m- \"only the following are supported\\n\\n{}\".format(params, cls.__param_registry)\u001b[m\n\u001b[32m+\u001b[m\u001b[32m f\"No such group for the following params: {params!r}, \"\u001b[m\n\u001b[32m+\u001b[m\u001b[32m f\"only the following are supported\\n\\n{cls.__param_registry}\"\u001b[m\n )\u001b[m\n return cls.__param_registry[frozenset(params)]\u001b[m\n \u001b[m\n\u001b[36m@@ -801,9 +801,7 @@\u001b[m \u001b[mclass Group(WithMemoization):\u001b[m\n if givens != needed:\u001b[m\n raise ParametrizationError(\u001b[m\n \"Passed parameters do not have a needed set of keys, \"\u001b[m\n\u001b[31m- \"they should be equal, got {givens}, needed {needed}\".format(\u001b[m\n\u001b[31m- givens=givens, needed=needed\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m f\"they should be equal, got {givens}, needed {needed}\"\u001b[m\n )\u001b[m\n self._user_params = dict()\u001b[m\n spec = self.get_param_spec_for(d=self.ddim, **kwargs.pop(\"spec_kw\", {}))\u001b[m\n\u001b[1mdiff --git a/pymc/variational/updates.py b/pymc/variational/updates.py\u001b[m\n\u001b[1mindex fa6e52b..4f46970 100644\u001b[m\n\u001b[1m--- a/pymc/variational/updates.py\u001b[m\n\u001b[1m+++ b/pymc/variational/updates.py\u001b[m\n\u001b[36m@@ -1005,9 +1005,7 @@\u001b[m \u001b[mdef norm_constraint(tensor_var, max_norm, norm_axes=None, epsilon=1e-7):\u001b[m\n elif ndim in [3, 4, 5]: # Conv{1,2,3}DLayer\u001b[m\n sum_over = tuple(range(1, ndim))\u001b[m\n else:\u001b[m\n\u001b[31m- raise ValueError(\u001b[m\n\u001b[31m- \"Unsupported tensor dimensionality {}.\" \"Must specify `norm_axes`\".format(ndim)\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m raise 
ValueError(f\"Unsupported tensor dimensionality {ndim}.\" \"Must specify `norm_axes`\")\u001b[m\n \u001b[m\n dtype = np.dtype(pytensor.config.floatX).type\u001b[m\n norms = pt.sqrt(pt.sum(pt.sqr(tensor_var), axis=sum_over, keepdims=True))\u001b[m\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/docs/source/contributing/python_style.md b/docs/source/contributing/python_style.md\nindex def33f053..5bb73d503 100644\n--- a/docs/source/contributing/python_style.md\n+++ b/docs/source/contributing/python_style.md\n@@ -33,7 +33,7 @@ git commit -m \"wip lol\" --no-verify\n To skip one particular hook, you can set the `SKIP` environment variable. E.g. (on Linux):\n \n ```bash\n-SKIP=pyupgrade git commit -m \"<descriptive message>\"\n+SKIP=ruff git commit -m \"<descriptive message>\"\n ```\n \n You can manually run all `pre-commit` hooks on all files with\ndiff --git a/pymc/_version.py b/pymc/_version.py\nindex 24c8e98be..ebbf5a3a2 100644\n--- a/pymc/_version.py\n+++ b/pymc/_version.py\n@@ -153,10 +153,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):\n root = os.path.dirname(root) # up a level\n \n if verbose:\n- print(\n- \"Tried directories %s but none started with prefix %s\"\n- % (str(rootdirs), parentdir_prefix)\n- )\n+ print(f\"Tried directories {str(rootdirs)} but none started with prefix {parentdir_prefix}\")\n raise NotThisMethod(\"rootdir doesn't start with parentdir_prefix\")\n \n \ndiff --git a/pymc/gp/util.py b/pymc/gp/util.py\nindex 39eb3b678..3f829ab00 100644\n--- a/pymc/gp/util.py\n+++ b/pymc/gp/util.py\n@@ -120,7 +120,7 @@ def kmeans_inducing_points(n_inducing, X, **kmeans_kwargs):\n \"To use K-means initialization, \"\n \"please provide X as a type that \"\n \"can be cast to np.ndarray, instead \"\n- \"of {}\".format(type(X))\n+ f\"of {type(X)}\"\n )\n scaling = np.std(X, 0)\n # if std of a column is very small (zero), don't normalize that column\ndiff --git a/pymc/sampling/population.py b/pymc/sampling/population.py\nindex 6dbe5cf90..5ed98c0a2 100644\n--- a/pymc/sampling/population.py\n+++ b/pymc/sampling/population.py\n@@ -135,7 +135,7 @@ def warn_population_size(\n if has_demcmc and chains <= initial_point_model_size:\n warnings.warn(\n \"DEMetropolis should be used with more chains than dimensions! \"\n- \"(The model has {} dimensions.)\".format(initial_point_model_size),\n+ f\"(The model has {initial_point_model_size} dimensions.)\",\n UserWarning,\n stacklevel=2,\n )\ndiff --git a/pymc/step_methods/arraystep.py b/pymc/step_methods/arraystep.py\nindex 4c945ab51..35f1443d8 100644\n--- a/pymc/step_methods/arraystep.py\n+++ b/pymc/step_methods/arraystep.py\n@@ -146,10 +146,8 @@ class PopulationArrayStepShared(ArrayStepShared):\n self.other_chains = [c for c in range(len(population)) if c != chain_index]\n if not len(self.other_chains) > 1:\n raise ValueError(\n- \"Population is just {} + {}. \"\n- \"This is too small and the error should have been raised earlier.\".format(\n- self.this_chain, self.other_chains\n- )\n+ f\"Population is just {self.this_chain} + {self.other_chains}. 
\"\n+ \"This is too small and the error should have been raised earlier.\"\n )\n return\n \ndiff --git a/pymc/step_methods/hmc/integration.py b/pymc/step_methods/hmc/integration.py\nindex 8ca72ecc4..c8defa2e8 100644\n--- a/pymc/step_methods/hmc/integration.py\n+++ b/pymc/step_methods/hmc/integration.py\n@@ -44,8 +44,8 @@ class CpuLeapfrogIntegrator:\n self._dtype = self._logp_dlogp_func.dtype\n if self._potential.dtype != self._dtype:\n raise ValueError(\n- \"dtypes of potential (%s) and logp function (%s)\"\n- \"don't match.\" % (self._potential.dtype, self._dtype)\n+ f\"dtypes of potential ({self._potential.dtype}) and logp function ({self._dtype})\"\n+ \"don't match.\"\n )\n \n def compute_state(self, q: RaveledVars, p: RaveledVars):\ndiff --git a/pymc/variational/inference.py b/pymc/variational/inference.py\nindex ce3bd02e2..6ee5815d1 100644\n--- a/pymc/variational/inference.py\n+++ b/pymc/variational/inference.py\n@@ -244,17 +244,11 @@ class Inference:\n if isinstance(e, StopIteration):\n logger.info(str(e))\n if n < 10:\n- logger.info(\n- \"Interrupted at {:,d} [{:.0f}%]: Loss = {:,.5g}\".format(\n- i, 100 * i // n, scores[i]\n- )\n- )\n+ logger.info(f\"Interrupted at {i:,d} [{100 * i // n:.0f}%]: Loss = {scores[i]:,.5g}\")\n else:\n avg_loss = _infmean(scores[min(0, i - 1000) : i + 1])\n logger.info(\n- \"Interrupted at {:,d} [{:.0f}%]: Average Loss = {:,.5g}\".format(\n- i, 100 * i // n, avg_loss\n- )\n+ f\"Interrupted at {i:,d} [{100 * i // n:.0f}%]: Average Loss = {avg_loss:,.5g}\"\n )\n else:\n if n == 0:\ndiff --git a/pymc/variational/opvi.py b/pymc/variational/opvi.py\nindex bd1874ffe..ae2e20ac8 100644\n--- a/pymc/variational/opvi.py\n+++ b/pymc/variational/opvi.py\n@@ -458,8 +458,8 @@ class Operator:\n self.approx = approx\n if self.require_logq and not approx.has_logq:\n raise ExplicitInferenceError(\n- \"%s requires logq, but %s does not implement it\"\n- \"please change inference method\" % (self, approx)\n+ f\"{self} requires logq, but {approx} does not implement it\"\n+ \"please change inference method\"\n )\n \n inputs = property(lambda self: self.approx.inputs)\n@@ -510,9 +510,7 @@ class Operator:\n return self.objective_class(self, f)\n \n def __str__(self): # pragma: no cover\n- return \"%(op)s[%(ap)s]\" % dict(\n- op=self.__class__.__name__, ap=self.approx.__class__.__name__\n- )\n+ return f\"{self.__class__.__name__}[{self.approx.__class__.__name__}]\"\n \n \n def collect_shared_to_list(params):\n@@ -701,8 +699,8 @@ class Group(WithMemoization):\n def group_for_params(cls, params):\n if frozenset(params) not in cls.__param_registry:\n raise KeyError(\n- \"No such group for the following params: {!r}, \"\n- \"only the following are supported\\n\\n{}\".format(params, cls.__param_registry)\n+ f\"No such group for the following params: {params!r}, \"\n+ f\"only the following are supported\\n\\n{cls.__param_registry}\"\n )\n return cls.__param_registry[frozenset(params)]\n \n@@ -801,9 +799,7 @@ class Group(WithMemoization):\n if givens != needed:\n raise ParametrizationError(\n \"Passed parameters do not have a needed set of keys, \"\n- \"they should be equal, got {givens}, needed {needed}\".format(\n- givens=givens, needed=needed\n- )\n+ f\"they should be equal, got {givens}, needed {needed}\"\n )\n self._user_params = dict()\n spec = self.get_param_spec_for(d=self.ddim, **kwargs.pop(\"spec_kw\", {}))\ndiff --git a/pymc/variational/updates.py b/pymc/variational/updates.py\nindex fa6e52b0e..4f46970d1 100644\n--- a/pymc/variational/updates.py\n+++ 
b/pymc/variational/updates.py\n@@ -1005,9 +1005,7 @@ def norm_constraint(tensor_var, max_norm, norm_axes=None, epsilon=1e-7):\n elif ndim in [3, 4, 5]: # Conv{1,2,3}DLayer\n sum_over = tuple(range(1, ndim))\n else:\n- raise ValueError(\n- \"Unsupported tensor dimensionality {}.\" \"Must specify `norm_axes`\".format(ndim)\n- )\n+ raise ValueError(f\"Unsupported tensor dimensionality {ndim}.\" \"Must specify `norm_axes`\")\n \n dtype = np.dtype(pytensor.config.floatX).type\n norms = pt.sqrt(pt.sum(pt.sqr(tensor_var), axis=sum_over, keepdims=True))\ndiff --git a/versioneer.py b/versioneer.py\nindex cf8adcc9e..9c8f7b060 100644\n--- a/versioneer.py\n+++ b/versioneer.py\n@@ -325,8 +325,7 @@ def get_root():\n vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])\n if me_dir != vsr_dir:\n print(\n- \"Warning: build in %s is using versioneer.py from %s\"\n- % (os.path.dirname(my_path), versioneer_py)\n+ f\"Warning: build in {os.path.dirname(my_path)} is using versioneer.py from {versioneer_py}\"\n )\n except NameError:\n pass\n@@ -1384,10 +1383,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):\n root = os.path.dirname(root) # up a level\n \n if verbose:\n- print(\n- \"Tried directories %s but none started with prefix %s\"\n- % (str(rootdirs), parentdir_prefix)\n- )\n+ print(f\"Tried directories {str(rootdirs)} but none started with prefix {parentdir_prefix}\")\n raise NotThisMethod(\"rootdir doesn't start with parentdir_prefix\")\n \n \n", "difficulty": 2, "changed_files": ["docs/source/contributing/python_style.md", "pymc/_version.py", "pymc/gp/util.py", "pymc/sampling/population.py", "pymc/step_methods/arraystep.py", "pymc/step_methods/hmc/integration.py", "pymc/variational/inference.py", "pymc/variational/opvi.py", "pymc/variational/updates.py", "versioneer.py"], "commit_link": "https://github.com/pymc-devs/pymc/tree/92937f3e3c898a7c90b04b04341d4f1b75e275cf"}
data/python/9981ca1.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 11, "repo_owner": "pymc-devs", "repo_name": "pymc", "head_branch": "ruff_linter", "workflow_name": "pre-commit", "workflow_filename": "pre-commit.yml", "workflow_path": ".github/workflows/pre-commit.yml", "contributor": "juanitorduz", "sha_fail": "9981ca154ba03a88deaa96d16b119de6183017e5", "sha_success": "c50bdf8c2e84c61953b892b8b80ea724bf1746b4", "workflow": "name: pre-commit\n\non:\n pull_request:\n push:\n branches: [main]\n\njobs:\n pre-commit:\n runs-on: ubuntu-latest\n env:\n SKIP: no-commit-to-branch\n steps:\n - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11\n - uses: actions/setup-python@v5\n with:\n python-version: \"3.9\" # Run pre-commit on oldest supported Python version\n - uses: pre-commit/[email protected]\n mypy:\n runs-on: ubuntu-latest\n defaults:\n run:\n shell: bash -l {0}\n steps:\n - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11\n - name: Cache conda\n uses: actions/cache@v3\n env:\n # Increase this value to reset cache if environment-test.yml has not changed\n CACHE_NUMBER: 0\n with:\n path: ~/conda_pkgs_dir\n key: ${{ runner.os }}-py39-conda-${{ env.CACHE_NUMBER }}-${{\n hashFiles('conda-envs/environment-test.yml') }}\n - name: Cache multiple paths\n uses: actions/cache@v3\n env:\n # Increase this value to reset cache if requirements.txt has not changed\n CACHE_NUMBER: 0\n with:\n path: |\n ~/.cache/pip\n $RUNNER_TOOL_CACHE/Python/*\n ~\\AppData\\Local\\pip\\Cache\n key: ${{ runner.os }}-build-${{ matrix.python-version }}-${{ env.CACHE_NUMBER }}-${{\n hashFiles('requirements.txt') }}\n - uses: conda-incubator/setup-miniconda@v2\n with:\n miniforge-variant: Mambaforge\n miniforge-version: latest\n mamba-version: \"*\"\n activate-environment: pymc-test\n channel-priority: strict\n environment-file: conda-envs/environment-test.yml\n python-version: \"3.9\" # Run pre-commit on oldest supported Python version\n use-mamba: true\n use-only-tar-bz2: false # IMPORTANT: This may break caching of conda packages! 
See https://github.com/conda-incubator/setup-miniconda/issues/267\n - name: Install-pymc and mypy dependencies\n run: |\n conda activate pymc-test\n pip install -e .\n pip install --pre -U polyagamma\n python --version\n - name: Run mypy\n run: |\n conda activate pymc-test\n python ./scripts/run_mypy.py --verbose\n", "logs": [{"step_name": "pre-commit/4_Run [email protected]", "log": "##[group]Run pre-commit/[email protected]\nwith:\n extra_args: --all-files\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\n##[group]Run python -m pip install pre-commit\n\u001b[36;1mpython -m pip install pre-commit\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\nCollecting pre-commit\n Downloading pre_commit-3.6.0-py2.py3-none-any.whl.metadata (1.3 kB)\nCollecting cfgv>=2.0.0 (from pre-commit)\n Downloading cfgv-3.4.0-py2.py3-none-any.whl.metadata (8.5 kB)\nCollecting identify>=1.0.0 (from pre-commit)\n Downloading identify-2.5.33-py2.py3-none-any.whl.metadata (4.4 kB)\nCollecting nodeenv>=0.11.1 (from pre-commit)\n Downloading nodeenv-1.8.0-py2.py3-none-any.whl.metadata (21 kB)\nCollecting pyyaml>=5.1 (from pre-commit)\n Downloading PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.1 kB)\nCollecting virtualenv>=20.10.0 (from pre-commit)\n Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nRequirement already satisfied: setuptools in /opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages (from nodeenv>=0.11.1->pre-commit) (58.1.0)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.10.0->pre-commit)\n Downloading distlib-0.3.8-py2.py3-none-any.whl.metadata (5.1 kB)\nCollecting filelock<4,>=3.12.2 (from virtualenv>=20.10.0->pre-commit)\n Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting platformdirs<5,>=3.9.1 (from virtualenv>=20.10.0->pre-commit)\n Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nDownloading pre_commit-3.6.0-py2.py3-none-any.whl (204 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 204.0/204.0 kB 14.2 MB/s eta 0:00:00\nDownloading cfgv-3.4.0-py2.py3-none-any.whl (7.2 kB)\nDownloading identify-2.5.33-py2.py3-none-any.whl (98 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 98.9/98.9 kB 29.1 MB/s eta 0:00:00\nDownloading nodeenv-1.8.0-py2.py3-none-any.whl (22 kB)\nDownloading 
PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (738 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 738.9/738.9 kB 67.7 MB/s eta 0:00:00\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 93.7 MB/s eta 0:00:00\nDownloading distlib-0.3.8-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 77.3 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nInstalling collected packages: distlib, pyyaml, platformdirs, nodeenv, identify, filelock, cfgv, virtualenv, pre-commit\nSuccessfully installed cfgv-3.4.0 distlib-0.3.8 filelock-3.13.1 identify-2.5.33 nodeenv-1.8.0 platformdirs-4.1.0 pre-commit-3.6.0 pyyaml-6.0.1 virtualenv-20.25.0\n\n[notice] A new release of pip is available: 23.0.1 -> 23.3.2\n[notice] To update, run: pip install --upgrade pip\n##[group]Run python -m pip freeze --local\n\u001b[36;1mpython -m pip freeze --local\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\ncfgv==3.4.0\ndistlib==0.3.8\nfilelock==3.13.1\nidentify==2.5.33\nnodeenv==1.8.0\nplatformdirs==4.1.0\npre-commit==3.6.0\nPyYAML==6.0.1\nvirtualenv==20.25.0\n##[group]Run actions/cache@v3\nwith:\n path: ~/.cache/pre-commit\n key: pre-commit-3|/opt/hostedtoolcache/Python/3.9.18/x64|8521b1deaccc96b7bf978113f6a9d2819d4547b59963b2c02ead71c093c77e29\n enableCrossOsArchive: false\n fail-on-cache-miss: false\n lookup-only: false\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\nCache not found for input keys: pre-commit-3|/opt/hostedtoolcache/Python/3.9.18/x64|8521b1deaccc96b7bf978113f6a9d2819d4547b59963b2c02ead71c093c77e29\n##[group]Run pre-commit run --show-diff-on-failure --color=always --all-files\n\u001b[36;1mpre-commit run --show-diff-on-failure --color=always --all-files\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n SKIP: no-commit-to-branch\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n 
Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\n[INFO]\u001b[m Initializing environment for https://github.com/pre-commit/pre-commit-hooks.\n[INFO]\u001b[m Initializing environment for https://github.com/lucianopaz/head_of_apache.\n[INFO]\u001b[m Initializing environment for https://github.com/asottile/pyupgrade.\n[INFO]\u001b[m Initializing environment for https://github.com/astral-sh/ruff-pre-commit.\n[INFO]\u001b[m Initializing environment for https://github.com/PyCQA/pydocstyle.\n[INFO]\u001b[m Initializing environment for https://github.com/MarcoGorelli/madforhooks.\n[INFO]\u001b[m Initializing environment for local:pandas,pyyaml.\n[INFO]\u001b[m Initializing environment for local:pyyaml.\n[INFO]\u001b[m Installing environment for https://github.com/pre-commit/pre-commit-hooks.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\n[INFO]\u001b[m Installing environment for https://github.com/lucianopaz/head_of_apache.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\n[INFO]\u001b[m Installing environment for https://github.com/asottile/pyupgrade.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\n[INFO]\u001b[m Installing environment for https://github.com/astral-sh/ruff-pre-commit.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\n[INFO]\u001b[m Installing environment for https://github.com/PyCQA/pydocstyle.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\n[INFO]\u001b[m Installing environment for https://github.com/MarcoGorelli/madforhooks.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\n[INFO]\u001b[m Installing environment for local.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\n[INFO]\u001b[m Installing environment for local.\n[INFO]\u001b[m Once installed this environment will be reused.\n[INFO]\u001b[m This may take a few minutes...\ncheck for merge conflicts............................................................\u001b[42mPassed\u001b[m\ncheck toml...........................................................................\u001b[42mPassed\u001b[m\ncheck yaml...........................................................................\u001b[42mPassed\u001b[m\ndebug statements (python)............................................................\u001b[42mPassed\u001b[m\nfix end of files.....................................................................\u001b[42mPassed\u001b[m\ndon't commit to branch..............................................................\u001b[43;30mSkipped\u001b[m\nfix requirements.txt.................................................................\u001b[42mPassed\u001b[m\ntrim trailing whitespace.............................................................\u001b[42mPassed\u001b[m\nApply Apache 2.0 
License.............................................................\u001b[42mPassed\u001b[m\npyupgrade............................................................................\u001b[42mPassed\u001b[m\nruff.................................................................................\u001b[42mPassed\u001b[m\nruff-format..........................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: ruff-format\u001b[m\n\u001b[2m- files were modified by this hook\u001b[m\n\n11 files reformatted, 197 files left unchanged\n\npydocstyle...........................................................................\u001b[42mPassed\u001b[m\nDisallow print statements............................................................\u001b[42mPassed\u001b[m\nCheck no tests are ignored...........................................................\u001b[42mPassed\u001b[m\nGenerate pip dependency from conda...................................................\u001b[42mPassed\u001b[m\nNo relative imports..................................................................\u001b[42mPassed\u001b[m\nCheck no links that should be cross-references are in the docs.......................\u001b[42mPassed\u001b[m\npre-commit hook(s) made changes.\nIf you are seeing this message in CI, reproduce locally with: `pre-commit run --all-files`.\nTo run `pre-commit` as part of git workflow, use `pre-commit install`.\nAll changes made by hooks:\n\u001b[1mdiff --git a/pymc/distributions/discrete.py b/pymc/distributions/discrete.py\u001b[m\n\u001b[1mindex f95b437..8771193 100644\u001b[m\n\u001b[1m--- a/pymc/distributions/discrete.py\u001b[m\n\u001b[1m+++ b/pymc/distributions/discrete.py\u001b[m\n\u001b[36m@@ -112,6 +112,7 @@\u001b[m \u001b[mclass Binomial(Discrete):\u001b[m\n logit_p : tensor_like of float\u001b[m\n Alternative log odds for the probability of success.\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = binomial\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -334,6 +335,7 @@\u001b[m \u001b[mclass Bernoulli(Discrete):\u001b[m\n logit_p : tensor_like of float\u001b[m\n Alternative log odds for the probability of success.\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = bernoulli\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -450,6 +452,7 @@\u001b[m \u001b[mclass DiscreteWeibull(Discrete):\u001b[m\n Shape parameter (beta > 0).\u001b[m\n \u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = discrete_weibull\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -539,6 +542,7 @@\u001b[m \u001b[mclass Poisson(Discrete):\u001b[m\n The Poisson distribution can be derived as a limiting case of the\u001b[m\n binomial distribution.\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = poisson\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -662,6 +666,7 @@\u001b[m \u001b[mclass NegativeBinomial(Discrete):\u001b[m\n n : tensor_like of float\u001b[m\n Alternative number of target success trials (n > 0)\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = nbinom\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -1108,6 +1113,7 @@\u001b[m \u001b[mclass Categorical(Discrete):\u001b[m\n logit_p : float\u001b[m\n Alternative log odds for the probability of success.\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = categorical\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -1183,6 +1189,7 @@\u001b[m \u001b[mclass _OrderedLogistic(Categorical):\u001b[m\n Underlying class for ordered logistic distributions.\u001b[m\n See docs for the 
OrderedLogistic wrapper class for more details on how to use it in models.\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = categorical\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -1289,6 +1296,7 @@\u001b[m \u001b[mclass _OrderedProbit(Categorical):\u001b[m\n Underlying class for ordered probit distributions.\u001b[m\n See docs for the OrderedProbit wrapper class for more details on how to use it in models.\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = categorical\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[1mdiff --git a/pymc/distributions/multivariate.py b/pymc/distributions/multivariate.py\u001b[m\n\u001b[1mindex 1e5a956..570c139 100644\u001b[m\n\u001b[1m--- a/pymc/distributions/multivariate.py\u001b[m\n\u001b[1m+++ b/pymc/distributions/multivariate.py\u001b[m\n\u001b[36m@@ -235,6 +235,7 @@\u001b[m \u001b[mclass MvNormal(Continuous):\u001b[m\n vals_raw = pm.Normal('vals_raw', mu=0, sigma=1, shape=(5, 3))\u001b[m\n vals = pm.Deterministic('vals', pt.dot(chol, vals_raw.T).T)\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = multivariate_normal\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -355,6 +356,7 @@\u001b[m \u001b[mclass MvStudentT(Continuous):\u001b[m\n lower : bool, default=True\u001b[m\n Whether the cholesky fatcor is given as a lower triangular matrix.\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = mv_studentt\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -436,6 +438,7 @@\u001b[m \u001b[mclass Dirichlet(SimplexContinuous):\u001b[m\n Concentration parameters (a > 0). The number of categories is given by the\u001b[m\n length of the last axis.\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = dirichlet\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -515,6 +518,7 @@\u001b[m \u001b[mclass Multinomial(Discrete):\u001b[m\n categories is given by the length of the last axis. Elements are expected to sum\u001b[m\n to 1 along the last axis.\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = multinomial\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -662,6 +666,7 @@\u001b[m \u001b[mclass DirichletMultinomial(Discrete):\u001b[m\n Dirichlet concentration parameters (a > 0). The number of categories is given by\u001b[m\n the length of the last axis.\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = dirichlet_multinomial\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -716,6 +721,7 @@\u001b[m \u001b[mclass _OrderedMultinomial(Multinomial):\u001b[m\n Underlying class for ordered multinomial distributions.\u001b[m\n See docs for the OrderedMultinomial wrapper class for more details on how to use it in models.\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = multinomial\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -940,6 +946,7 @@\u001b[m \u001b[mclass Wishart(Continuous):\u001b[m\n This distribution is unusable in a PyMC model. You should instead\u001b[m\n use LKJCholeskyCov or LKJCorr.\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = wishart\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -1763,6 +1770,7 @@\u001b[m \u001b[mclass MatrixNormal(Continuous):\u001b[m\n vals = pm.MatrixNormal('vals', mu=mu, colchol=colchol, rowcov=rowcov,\u001b[m\n observed=data)\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = matrixnormal\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -1977,6 +1985,7 @@\u001b[m \u001b[mclass KroneckerNormal(Continuous):\u001b[m\n ----------\u001b[m\n .. [1] Saatchi, Y. (2011). 
\"Scalable inference for structured Gaussian process models\"\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = kroneckernormal\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -2183,6 +2192,7 @@\u001b[m \u001b[mclass CAR(Continuous):\u001b[m\n \"Generalized Hierarchical Multivariate CAR Models for Areal Data\"\u001b[m\n Biometrics, Vol. 61, No. 4 (Dec., 2005), pp. 950-961\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = car\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -2400,9 +2410,7 @@\u001b[m \u001b[mclass ICAR(Continuous):\u001b[m\n return pt.zeros(N)\u001b[m\n \u001b[m\n def logp(value, W, node1, node2, N, sigma, zero_sum_stdev):\u001b[m\n\u001b[31m- pairwise_difference = (-1 / (2 * sigma**2)) * pt.sum(\u001b[m\n\u001b[31m- pt.square(value[node1] - value[node2])\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m pairwise_difference = (-1 / (2 * sigma**2)) * pt.sum(pt.square(value[node1] - value[node2]))\u001b[m\n zero_sum = (\u001b[m\n -0.5 * pt.pow(pt.sum(value) / (zero_sum_stdev * N), 2)\u001b[m\n - pt.log(pt.sqrt(2.0 * np.pi))\u001b[m\n\u001b[36m@@ -2498,6 +2506,7 @@\u001b[m \u001b[mclass StickBreakingWeights(SimplexContinuous):\u001b[m\n .. [2] M\u00fcller, P., Quintana, F. A., Jara, A., & Hanson, T. (2015). Bayesian nonparametric data\u001b[m\n analysis. New York: Springer.\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_op = stickbreakingweights\u001b[m\n \u001b[m\n @classmethod\u001b[m\n\u001b[36m@@ -2641,6 +2650,7 @@\u001b[m \u001b[mclass ZeroSumNormal(Distribution):\u001b[m\n # the zero sum axes will be the last two\u001b[m\n v = pm.ZeroSumNormal(\"v\", shape=(3, 4, 5), n_zerosum_axes=2)\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n rv_type = ZeroSumNormalRV\u001b[m\n \u001b[m\n def __new__(\u001b[m\n\u001b[1mdiff --git a/pymc/model/core.py b/pymc/model/core.py\u001b[m\n\u001b[1mindex c45f3f5..6ee6d49 100644\u001b[m\n\u001b[1m--- a/pymc/model/core.py\u001b[m\n\u001b[1m+++ b/pymc/model/core.py\u001b[m\n\u001b[36m@@ -138,9 +138,7 @@\u001b[m \u001b[mclass ContextMeta(type):\u001b[m\n \u001b[m\n # FIXME: is there a more elegant way to automatically add methods to the class that\u001b[m\n # are instance methods instead of class methods?\u001b[m\n\u001b[31m- def __init__(\u001b[m\n\u001b[31m- cls, name, bases, nmspc, context_class: Optional[Type] = None, **kwargs\u001b[m\n\u001b[31m- ): # pylint: disable=unused-argument\u001b[m\n\u001b[32m+\u001b[m\u001b[32m def __init__(cls, name, bases, nmspc, context_class: Optional[Type] = None, **kwargs): # pylint: disable=unused-argument\u001b[m\n \"\"\"Add ``__enter__`` and ``__exit__`` methods to the new class automatically.\"\"\"\u001b[m\n if context_class is not None:\u001b[m\n cls._context_class = context_class\u001b[m\n\u001b[36m@@ -1740,7 +1738,7 @@\u001b[m \u001b[mclass Model(WithMemoization, metaclass=ContextMeta):\u001b[m\n done = {}\u001b[m\n used_ids = {}\u001b[m\n for i, out in enumerate(rv_inputs.maker.fgraph.outputs):\u001b[m\n\u001b[31m- print_(f\"{i}: \", end=\"\"),\u001b[m\n\u001b[32m+\u001b[m\u001b[32m (print_(f\"{i}: \", end=\"\"),)\u001b[m\n # Don't print useless deepcopys\u001b[m\n if out.owner and isinstance(out.owner.op, DeepCopyOp):\u001b[m\n out = out.owner.inputs[0]\u001b[m\n\u001b[1mdiff --git a/pymc/ode/ode.py b/pymc/ode/ode.py\u001b[m\n\u001b[1mindex a5e3741..600f306 100644\u001b[m\n\u001b[1m--- a/pymc/ode/ode.py\u001b[m\n\u001b[1m+++ b/pymc/ode/ode.py\u001b[m\n\u001b[36m@@ -67,6 +67,7 @@\u001b[m \u001b[mclass DifferentialEquation(Op):\u001b[m\n ode_model = 
DifferentialEquation(func=odefunc, times=times, n_states=1, n_theta=1, t0=0)\u001b[m\n \u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n _itypes = [\u001b[m\n TensorType(floatX, (False,)), # y0 as 1D floatX vector\u001b[m\n TensorType(floatX, (False,)), # theta as 1D floatX vector\u001b[m\n\u001b[1mdiff --git a/pymc/printing.py b/pymc/printing.py\u001b[m\n\u001b[1mindex ffc943a..9fe7d05 100644\u001b[m\n\u001b[1m--- a/pymc/printing.py\u001b[m\n\u001b[1m+++ b/pymc/printing.py\u001b[m\n\u001b[36m@@ -123,9 +123,7 @@\u001b[m \u001b[mdef str_for_model(model: Model, formatting: str = \"plain\", include_params: bool\u001b[m\n \\begin{{array}}{{rcl}}\u001b[m\n {}\u001b[m\n \\end{{array}}\u001b[m\n\u001b[31m- $$\"\"\".format(\u001b[m\n\u001b[31m- \"\\\\\\\\\".join(var_reprs)\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m $$\"\"\".format(\"\\\\\\\\\".join(var_reprs))\u001b[m\n else:\u001b[m\n # align vars on their ~\u001b[m\n names = [s[: s.index(\"~\") - 1] for s in var_reprs]\u001b[m\n\u001b[1mdiff --git a/pymc/step_methods/metropolis.py b/pymc/step_methods/metropolis.py\u001b[m\n\u001b[1mindex 1adb462..e080cdd 100644\u001b[m\n\u001b[1m--- a/pymc/step_methods/metropolis.py\u001b[m\n\u001b[1m+++ b/pymc/step_methods/metropolis.py\u001b[m\n\u001b[36m@@ -134,7 +134,7 @@\u001b[m \u001b[mclass Metropolis(ArrayStepShared):\u001b[m\n tune_interval=100,\u001b[m\n model=None,\u001b[m\n mode=None,\u001b[m\n\u001b[31m- **kwargs\u001b[m\n\u001b[32m+\u001b[m\u001b[32m **kwargs,\u001b[m\n ):\u001b[m\n \"\"\"Create an instance of a Metropolis stepper\u001b[m\n \u001b[m\n\u001b[36m@@ -771,7 +771,7 @@\u001b[m \u001b[mclass DEMetropolis(PopulationArrayStepShared):\u001b[m\n tune_interval=100,\u001b[m\n model=None,\u001b[m\n mode=None,\u001b[m\n\u001b[31m- **kwargs\u001b[m\n\u001b[32m+\u001b[m\u001b[32m **kwargs,\u001b[m\n ):\u001b[m\n model = pm.modelcontext(model)\u001b[m\n initial_values = model.initial_point()\u001b[m\n\u001b[36m@@ -915,7 +915,7 @@\u001b[m \u001b[mclass DEMetropolisZ(ArrayStepShared):\u001b[m\n tune_drop_fraction: float = 0.9,\u001b[m\n model=None,\u001b[m\n mode=None,\u001b[m\n\u001b[31m- **kwargs\u001b[m\n\u001b[32m+\u001b[m\u001b[32m **kwargs,\u001b[m\n ):\u001b[m\n model = pm.modelcontext(model)\u001b[m\n initial_values = model.initial_point()\u001b[m\n\u001b[1mdiff --git a/pymc/tuning/starting.py b/pymc/tuning/starting.py\u001b[m\n\u001b[1mindex 6a4d338..ad5f554 100644\u001b[m\n\u001b[1m--- a/pymc/tuning/starting.py\u001b[m\n\u001b[1m+++ b/pymc/tuning/starting.py\u001b[m\n\u001b[36m@@ -52,7 +52,7 @@\u001b[m \u001b[mdef find_MAP(\u001b[m\n model=None,\u001b[m\n *args,\u001b[m\n seed: Optional[int] = None,\u001b[m\n\u001b[31m- **kwargs\u001b[m\n\u001b[32m+\u001b[m\u001b[32m **kwargs,\u001b[m\n ):\u001b[m\n \"\"\"Finds the local maximum a posteriori point given a model.\u001b[m\n \u001b[m\n\u001b[1mdiff --git a/pymc/variational/approximations.py b/pymc/variational/approximations.py\u001b[m\n\u001b[1mindex 00df445..feb0a3a 100644\u001b[m\n\u001b[1m--- a/pymc/variational/approximations.py\u001b[m\n\u001b[1m+++ b/pymc/variational/approximations.py\u001b[m\n\u001b[36m@@ -46,6 +46,7 @@\u001b[m \u001b[mclass MeanFieldGroup(Group):\u001b[m\n that latent space variables are uncorrelated that is the main drawback\u001b[m\n of the method\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n __param_spec__ = dict(mu=(\"d\",), rho=(\"d\",))\u001b[m\n short_name = \"mean_field\"\u001b[m\n alias_names = frozenset([\"mf\"])\u001b[m\n\u001b[36m@@ -350,27 +351,21 @@\u001b[m \u001b[mclass 
SingleGroupApproximation(Approximation):\u001b[m\n class MeanField(SingleGroupApproximation):\u001b[m\n __doc__ = \"\"\"**Single Group Mean Field Approximation**\u001b[m\n \u001b[m\n\u001b[31m- \"\"\" + str(\u001b[m\n\u001b[31m- MeanFieldGroup.__doc__\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m \"\"\" + str(MeanFieldGroup.__doc__)\u001b[m\n _group_class = MeanFieldGroup\u001b[m\n \u001b[m\n \u001b[m\n class FullRank(SingleGroupApproximation):\u001b[m\n __doc__ = \"\"\"**Single Group Full Rank Approximation**\u001b[m\n \u001b[m\n\u001b[31m- \"\"\" + str(\u001b[m\n\u001b[31m- FullRankGroup.__doc__\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m \"\"\" + str(FullRankGroup.__doc__)\u001b[m\n _group_class = FullRankGroup\u001b[m\n \u001b[m\n \u001b[m\n class Empirical(SingleGroupApproximation):\u001b[m\n __doc__ = \"\"\"**Single Group Full Rank Approximation**\u001b[m\n \u001b[m\n\u001b[31m- \"\"\" + str(\u001b[m\n\u001b[31m- EmpiricalGroup.__doc__\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m \"\"\" + str(EmpiricalGroup.__doc__)\u001b[m\n _group_class = EmpiricalGroup\u001b[m\n \u001b[m\n def __init__(self, trace=None, size=None, **kwargs):\u001b[m\n\u001b[1mdiff --git a/pymc/variational/operators.py b/pymc/variational/operators.py\u001b[m\n\u001b[1mindex 1122a70..f6ef095 100644\u001b[m\n\u001b[1m--- a/pymc/variational/operators.py\u001b[m\n\u001b[1m+++ b/pymc/variational/operators.py\u001b[m\n\u001b[36m@@ -130,6 +130,7 @@\u001b[m \u001b[mclass KSD(Operator):\u001b[m\n Stein Variational Gradient Descent: A General Purpose Bayesian Inference Algorithm\u001b[m\n arXiv:1608.04471\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n has_test_function = True\u001b[m\n returns_loss = False\u001b[m\n require_logq = False\u001b[m\n\u001b[1mdiff --git a/pymc/variational/opvi.py b/pymc/variational/opvi.py\u001b[m\n\u001b[1mindex cf98c98..bd1874f 100644\u001b[m\n\u001b[1m--- a/pymc/variational/opvi.py\u001b[m\n\u001b[1m+++ b/pymc/variational/opvi.py\u001b[m\n\u001b[36m@@ -663,6 +663,7 @@\u001b[m \u001b[mclass Group(WithMemoization):\u001b[m\n - Kingma, D. P., & Welling, M. (2014).\u001b[m\n `Auto-Encoding Variational Bayes. stat, 1050, 1. 
<https://arxiv.org/abs/1312.6114>`_\u001b[m\n \"\"\"\u001b[m\n\u001b[32m+\u001b[m\n # needs to be defined in init\u001b[m\n shared_params = None\u001b[m\n symbolic_initial = None\u001b[m\n\u001b[36m@@ -709,8 +710,9 @@\u001b[m \u001b[mclass Group(WithMemoization):\u001b[m\n def group_for_short_name(cls, name):\u001b[m\n if name.lower() not in cls.__name_registry:\u001b[m\n raise KeyError(\u001b[m\n\u001b[31m- \"No such group: {!r}, \"\u001b[m\n\u001b[31m- \"only the following are supported\\n\\n{}\".format(name, cls.__name_registry)\u001b[m\n\u001b[32m+\u001b[m\u001b[32m \"No such group: {!r}, \" \"only the following are supported\\n\\n{}\".format(\u001b[m\n\u001b[32m+\u001b[m\u001b[32m name, cls.__name_registry\u001b[m\n\u001b[32m+\u001b[m\u001b[32m )\u001b[m\n )\u001b[m\n return cls.__name_registry[name.lower()]\u001b[m\n \u001b[m\n\u001b[1mdiff --git a/versioneer.py b/versioneer.py\u001b[m\n\u001b[1mindex a560e68..c2b9d28 100644\u001b[m\n\u001b[1m--- a/versioneer.py\u001b[m\n\u001b[1m+++ b/versioneer.py\u001b[m\n\u001b[36m@@ -432,9 +432,7 @@\u001b[m \u001b[mdef run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=\u001b[m\n return stdout, process.returncode\u001b[m\n \u001b[m\n \u001b[m\n\u001b[31m-LONG_VERSION_PY[\u001b[m\n\u001b[31m- \"git\"\u001b[m\n\u001b[31m-] = r'''\u001b[m\n\u001b[32m+\u001b[m\u001b[32mLONG_VERSION_PY[\"git\"] = r'''\u001b[m\n # This file helps to compute a version number in source trees obtained from\u001b[m\n # git-archive tarball (such as those provided by githubs download-from-tag\u001b[m\n # feature). Distribution tarballs (built by setup.py sdist) and build\u001b[m\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/pymc/distributions/discrete.py b/pymc/distributions/discrete.py\nindex f95b4374d..877119350 100644\n--- a/pymc/distributions/discrete.py\n+++ b/pymc/distributions/discrete.py\n@@ -112,6 +112,7 @@ class Binomial(Discrete):\n logit_p : tensor_like of float\n Alternative log odds for the probability of success.\n \"\"\"\n+\n rv_op = binomial\n \n @classmethod\n@@ -334,6 +335,7 @@ class Bernoulli(Discrete):\n logit_p : tensor_like of float\n Alternative log odds for the probability of success.\n \"\"\"\n+\n rv_op = bernoulli\n \n @classmethod\n@@ -450,6 +452,7 @@ class DiscreteWeibull(Discrete):\n Shape parameter (beta > 0).\n \n \"\"\"\n+\n rv_op = discrete_weibull\n \n @classmethod\n@@ -539,6 +542,7 @@ class Poisson(Discrete):\n The Poisson distribution can be derived as a limiting case of the\n binomial distribution.\n \"\"\"\n+\n rv_op = poisson\n \n @classmethod\n@@ -662,6 +666,7 @@ class NegativeBinomial(Discrete):\n n : tensor_like of float\n Alternative number of target success trials (n > 0)\n \"\"\"\n+\n rv_op = nbinom\n \n @classmethod\n@@ -1108,6 +1113,7 @@ class Categorical(Discrete):\n logit_p : float\n Alternative log odds for the probability of success.\n \"\"\"\n+\n rv_op = categorical\n \n @classmethod\n@@ -1183,6 +1189,7 @@ class _OrderedLogistic(Categorical):\n Underlying class for ordered logistic distributions.\n See docs for the OrderedLogistic wrapper class for more details on how to use it in models.\n \"\"\"\n+\n rv_op = categorical\n \n @classmethod\n@@ -1289,6 +1296,7 @@ class _OrderedProbit(Categorical):\n Underlying class for ordered probit distributions.\n See docs for the OrderedProbit wrapper class for more details on how to use it in models.\n \"\"\"\n+\n rv_op = categorical\n \n @classmethod\ndiff --git a/pymc/distributions/multivariate.py 
b/pymc/distributions/multivariate.py\nindex 1e5a9567a..570c13988 100644\n--- a/pymc/distributions/multivariate.py\n+++ b/pymc/distributions/multivariate.py\n@@ -235,6 +235,7 @@ class MvNormal(Continuous):\n vals_raw = pm.Normal('vals_raw', mu=0, sigma=1, shape=(5, 3))\n vals = pm.Deterministic('vals', pt.dot(chol, vals_raw.T).T)\n \"\"\"\n+\n rv_op = multivariate_normal\n \n @classmethod\n@@ -355,6 +356,7 @@ class MvStudentT(Continuous):\n lower : bool, default=True\n Whether the cholesky fatcor is given as a lower triangular matrix.\n \"\"\"\n+\n rv_op = mv_studentt\n \n @classmethod\n@@ -436,6 +438,7 @@ class Dirichlet(SimplexContinuous):\n Concentration parameters (a > 0). The number of categories is given by the\n length of the last axis.\n \"\"\"\n+\n rv_op = dirichlet\n \n @classmethod\n@@ -515,6 +518,7 @@ class Multinomial(Discrete):\n categories is given by the length of the last axis. Elements are expected to sum\n to 1 along the last axis.\n \"\"\"\n+\n rv_op = multinomial\n \n @classmethod\n@@ -662,6 +666,7 @@ class DirichletMultinomial(Discrete):\n Dirichlet concentration parameters (a > 0). The number of categories is given by\n the length of the last axis.\n \"\"\"\n+\n rv_op = dirichlet_multinomial\n \n @classmethod\n@@ -716,6 +721,7 @@ class _OrderedMultinomial(Multinomial):\n Underlying class for ordered multinomial distributions.\n See docs for the OrderedMultinomial wrapper class for more details on how to use it in models.\n \"\"\"\n+\n rv_op = multinomial\n \n @classmethod\n@@ -940,6 +946,7 @@ class Wishart(Continuous):\n This distribution is unusable in a PyMC model. You should instead\n use LKJCholeskyCov or LKJCorr.\n \"\"\"\n+\n rv_op = wishart\n \n @classmethod\n@@ -1763,6 +1770,7 @@ class MatrixNormal(Continuous):\n vals = pm.MatrixNormal('vals', mu=mu, colchol=colchol, rowcov=rowcov,\n observed=data)\n \"\"\"\n+\n rv_op = matrixnormal\n \n @classmethod\n@@ -1977,6 +1985,7 @@ class KroneckerNormal(Continuous):\n ----------\n .. [1] Saatchi, Y. (2011). \"Scalable inference for structured Gaussian process models\"\n \"\"\"\n+\n rv_op = kroneckernormal\n \n @classmethod\n@@ -2183,6 +2192,7 @@ class CAR(Continuous):\n \"Generalized Hierarchical Multivariate CAR Models for Areal Data\"\n Biometrics, Vol. 61, No. 4 (Dec., 2005), pp. 950-961\n \"\"\"\n+\n rv_op = car\n \n @classmethod\n@@ -2400,9 +2410,7 @@ class ICAR(Continuous):\n return pt.zeros(N)\n \n def logp(value, W, node1, node2, N, sigma, zero_sum_stdev):\n- pairwise_difference = (-1 / (2 * sigma**2)) * pt.sum(\n- pt.square(value[node1] - value[node2])\n- )\n+ pairwise_difference = (-1 / (2 * sigma**2)) * pt.sum(pt.square(value[node1] - value[node2]))\n zero_sum = (\n -0.5 * pt.pow(pt.sum(value) / (zero_sum_stdev * N), 2)\n - pt.log(pt.sqrt(2.0 * np.pi))\n@@ -2498,6 +2506,7 @@ class StickBreakingWeights(SimplexContinuous):\n .. [2] M\u00fcller, P., Quintana, F. A., Jara, A., & Hanson, T. (2015). Bayesian nonparametric data\n analysis. 
New York: Springer.\n \"\"\"\n+\n rv_op = stickbreakingweights\n \n @classmethod\n@@ -2641,6 +2650,7 @@ class ZeroSumNormal(Distribution):\n # the zero sum axes will be the last two\n v = pm.ZeroSumNormal(\"v\", shape=(3, 4, 5), n_zerosum_axes=2)\n \"\"\"\n+\n rv_type = ZeroSumNormalRV\n \n def __new__(\ndiff --git a/pymc/model/core.py b/pymc/model/core.py\nindex c45f3f550..6ee6d491a 100644\n--- a/pymc/model/core.py\n+++ b/pymc/model/core.py\n@@ -138,9 +138,7 @@ class ContextMeta(type):\n \n # FIXME: is there a more elegant way to automatically add methods to the class that\n # are instance methods instead of class methods?\n- def __init__(\n- cls, name, bases, nmspc, context_class: Optional[Type] = None, **kwargs\n- ): # pylint: disable=unused-argument\n+ def __init__(cls, name, bases, nmspc, context_class: Optional[Type] = None, **kwargs): # pylint: disable=unused-argument\n \"\"\"Add ``__enter__`` and ``__exit__`` methods to the new class automatically.\"\"\"\n if context_class is not None:\n cls._context_class = context_class\n@@ -1740,7 +1738,7 @@ class Model(WithMemoization, metaclass=ContextMeta):\n done = {}\n used_ids = {}\n for i, out in enumerate(rv_inputs.maker.fgraph.outputs):\n- print_(f\"{i}: \", end=\"\"),\n+ (print_(f\"{i}: \", end=\"\"),)\n # Don't print useless deepcopys\n if out.owner and isinstance(out.owner.op, DeepCopyOp):\n out = out.owner.inputs[0]\ndiff --git a/pymc/ode/ode.py b/pymc/ode/ode.py\nindex a5e374130..600f30632 100644\n--- a/pymc/ode/ode.py\n+++ b/pymc/ode/ode.py\n@@ -67,6 +67,7 @@ class DifferentialEquation(Op):\n ode_model = DifferentialEquation(func=odefunc, times=times, n_states=1, n_theta=1, t0=0)\n \n \"\"\"\n+\n _itypes = [\n TensorType(floatX, (False,)), # y0 as 1D floatX vector\n TensorType(floatX, (False,)), # theta as 1D floatX vector\ndiff --git a/pymc/printing.py b/pymc/printing.py\nindex ffc943aa1..9fe7d056c 100644\n--- a/pymc/printing.py\n+++ b/pymc/printing.py\n@@ -123,9 +123,7 @@ def str_for_model(model: Model, formatting: str = \"plain\", include_params: bool\n \\begin{{array}}{{rcl}}\n {}\n \\end{{array}}\n- $$\"\"\".format(\n- \"\\\\\\\\\".join(var_reprs)\n- )\n+ $$\"\"\".format(\"\\\\\\\\\".join(var_reprs))\n else:\n # align vars on their ~\n names = [s[: s.index(\"~\") - 1] for s in var_reprs]\ndiff --git a/pymc/step_methods/metropolis.py b/pymc/step_methods/metropolis.py\nindex 1adb462d9..e080cdd09 100644\n--- a/pymc/step_methods/metropolis.py\n+++ b/pymc/step_methods/metropolis.py\n@@ -134,7 +134,7 @@ class Metropolis(ArrayStepShared):\n tune_interval=100,\n model=None,\n mode=None,\n- **kwargs\n+ **kwargs,\n ):\n \"\"\"Create an instance of a Metropolis stepper\n \n@@ -771,7 +771,7 @@ class DEMetropolis(PopulationArrayStepShared):\n tune_interval=100,\n model=None,\n mode=None,\n- **kwargs\n+ **kwargs,\n ):\n model = pm.modelcontext(model)\n initial_values = model.initial_point()\n@@ -915,7 +915,7 @@ class DEMetropolisZ(ArrayStepShared):\n tune_drop_fraction: float = 0.9,\n model=None,\n mode=None,\n- **kwargs\n+ **kwargs,\n ):\n model = pm.modelcontext(model)\n initial_values = model.initial_point()\ndiff --git a/pymc/tuning/starting.py b/pymc/tuning/starting.py\nindex 6a4d33894..ad5f554ae 100644\n--- a/pymc/tuning/starting.py\n+++ b/pymc/tuning/starting.py\n@@ -52,7 +52,7 @@ def find_MAP(\n model=None,\n *args,\n seed: Optional[int] = None,\n- **kwargs\n+ **kwargs,\n ):\n \"\"\"Finds the local maximum a posteriori point given a model.\n \ndiff --git a/pymc/variational/approximations.py 
b/pymc/variational/approximations.py\nindex 00df44599..feb0a3a92 100644\n--- a/pymc/variational/approximations.py\n+++ b/pymc/variational/approximations.py\n@@ -46,6 +46,7 @@ class MeanFieldGroup(Group):\n that latent space variables are uncorrelated that is the main drawback\n of the method\n \"\"\"\n+\n __param_spec__ = dict(mu=(\"d\",), rho=(\"d\",))\n short_name = \"mean_field\"\n alias_names = frozenset([\"mf\"])\n@@ -350,27 +351,21 @@ class SingleGroupApproximation(Approximation):\n class MeanField(SingleGroupApproximation):\n __doc__ = \"\"\"**Single Group Mean Field Approximation**\n \n- \"\"\" + str(\n- MeanFieldGroup.__doc__\n- )\n+ \"\"\" + str(MeanFieldGroup.__doc__)\n _group_class = MeanFieldGroup\n \n \n class FullRank(SingleGroupApproximation):\n __doc__ = \"\"\"**Single Group Full Rank Approximation**\n \n- \"\"\" + str(\n- FullRankGroup.__doc__\n- )\n+ \"\"\" + str(FullRankGroup.__doc__)\n _group_class = FullRankGroup\n \n \n class Empirical(SingleGroupApproximation):\n __doc__ = \"\"\"**Single Group Full Rank Approximation**\n \n- \"\"\" + str(\n- EmpiricalGroup.__doc__\n- )\n+ \"\"\" + str(EmpiricalGroup.__doc__)\n _group_class = EmpiricalGroup\n \n def __init__(self, trace=None, size=None, **kwargs):\ndiff --git a/pymc/variational/operators.py b/pymc/variational/operators.py\nindex 1122a704b..f6ef09572 100644\n--- a/pymc/variational/operators.py\n+++ b/pymc/variational/operators.py\n@@ -130,6 +130,7 @@ class KSD(Operator):\n Stein Variational Gradient Descent: A General Purpose Bayesian Inference Algorithm\n arXiv:1608.04471\n \"\"\"\n+\n has_test_function = True\n returns_loss = False\n require_logq = False\ndiff --git a/pymc/variational/opvi.py b/pymc/variational/opvi.py\nindex cf98c985a..bd1874ffe 100644\n--- a/pymc/variational/opvi.py\n+++ b/pymc/variational/opvi.py\n@@ -663,6 +663,7 @@ class Group(WithMemoization):\n - Kingma, D. P., & Welling, M. (2014).\n `Auto-Encoding Variational Bayes. stat, 1050, 1. <https://arxiv.org/abs/1312.6114>`_\n \"\"\"\n+\n # needs to be defined in init\n shared_params = None\n symbolic_initial = None\n@@ -709,8 +710,9 @@ class Group(WithMemoization):\n def group_for_short_name(cls, name):\n if name.lower() not in cls.__name_registry:\n raise KeyError(\n- \"No such group: {!r}, \"\n- \"only the following are supported\\n\\n{}\".format(name, cls.__name_registry)\n+ \"No such group: {!r}, \" \"only the following are supported\\n\\n{}\".format(\n+ name, cls.__name_registry\n+ )\n )\n return cls.__name_registry[name.lower()]\n \ndiff --git a/versioneer.py b/versioneer.py\nindex a560e685f..c2b9d28bc 100644\n--- a/versioneer.py\n+++ b/versioneer.py\n@@ -432,9 +432,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=\n return stdout, process.returncode\n \n \n-LONG_VERSION_PY[\n- \"git\"\n-] = r'''\n+LONG_VERSION_PY[\"git\"] = r'''\n # This file helps to compute a version number in source trees obtained from\n # git-archive tarball (such as those provided by githubs download-from-tag\n # feature). Distribution tarballs (built by setup.py sdist) and build\n", "difficulty": 2, "changed_files": ["pymc/distributions/discrete.py", "pymc/distributions/multivariate.py", "pymc/model/core.py", "pymc/ode/ode.py", "pymc/printing.py", "pymc/step_methods/metropolis.py", "pymc/tuning/starting.py", "pymc/variational/approximations.py", "pymc/variational/operators.py", "pymc/variational/opvi.py", "versioneer.py"], "commit_link": "https://github.com/pymc-devs/pymc/tree/9981ca154ba03a88deaa96d16b119de6183017e5"}
data/python/99ad8a3.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 52, "repo_owner": "googleapis", "repo_name": "google-cloud-python", "head_branch": "owl-bot-copy-packages-google-ai-generativelanguage", "workflow_name": "docs", "workflow_filename": "docs.yml", "workflow_path": ".github/workflows/docs.yml", "contributor": "googleapis", "sha_fail": "99ad8a351bb884f1e398c1d85c62d6b6e0bdd67e", "sha_success": "fba9d4b774cc6158768b3b503c43f72c005edfce", "workflow": "on:\n pull_request:\n branches:\n - main\nname: docs\n\npermissions:\n contents: read\n\njobs:\n docs:\n runs-on: ubuntu-latest\n steps:\n - name: Checkout\n uses: actions/checkout@v4\n # Use a fetch-depth of 2 to avoid error `fatal: origin/main...HEAD: no merge base`\n # See https://github.com/googleapis/google-cloud-python/issues/12013\n # and https://github.com/actions/checkout#checkout-head.\n with:\n fetch-depth: 2\n - name: Setup Python\n uses: actions/setup-python@v5\n with:\n python-version: \"3.10\"\n - name: Install nox\n run: |\n python -m pip install --upgrade setuptools pip wheel\n python -m pip install nox\n - name: Run docs\n env:\n BUILD_TYPE: presubmit\n TEST_TYPE: docs\n PY_VERSION: \"3.10\"\n run: |\n ci/run_conditional_tests.sh\n docfx:\n runs-on: ubuntu-latest\n steps:\n - name: Checkout\n uses: actions/checkout@v4\n # Use a fetch-depth of 2 to avoid error `fatal: origin/main...HEAD: no merge base`\n # See https://github.com/googleapis/google-cloud-python/issues/12013\n # and https://github.com/actions/checkout#checkout-head.\n with:\n fetch-depth: 2\n - name: Setup Python\n uses: actions/setup-python@v5\n with:\n python-version: \"3.10\"\n - name: Install nox\n run: |\n python -m pip install --upgrade setuptools pip wheel\n python -m pip install nox\n - name: Run docfx\n env:\n BUILD_TYPE: presubmit\n TEST_TYPE: docfx\n PY_VERSION: \"3.10\"\n run: |\n ci/run_conditional_tests.sh\n", "logs": [{"step_name": "docs/5_Run docs.txt", "log": "##[group]Run ci/run_conditional_tests.sh\n\u001b[36;1mci/run_conditional_tests.sh\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.10.13/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib\n BUILD_TYPE: presubmit\n TEST_TYPE: docs\n PY_VERSION: 3.10\n##[endgroup]\nFrom https://github.com/googleapis/google-cloud-python\n * branch main -> FETCH_HEAD\n * [new branch] main -> origin/main\nchecking changes with 'git diff --quiet origin/main... packages/google-ai-generativelanguage/'\nchange detected in packages/google-ai-generativelanguage/\nrunning test in packages/google-ai-generativelanguage/\n~/work/google-cloud-python/google-cloud-python/packages/google-ai-generativelanguage ~/work/google-cloud-python/google-cloud-python\nnox > Running session docs\nnox > Creating virtual environment (virtualenv) using python3.10 in .nox/docs\nnox > python -m pip install -e .\nnox > python -m pip install sphinxcontrib-applehelp==1.0.4 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 sphinx==4.5.0 alabaster recommonmark\nnox > sphinx-build -W -T -N -b html -d docs/_build/doctrees/ docs/ docs/_build/html/\nRunning Sphinx v4.5.0\nmaking output directory... 
done\n[autosummary] generating autosummary for: CHANGELOG.md, README.rst, generativelanguage_v1/generative_service.rst, generativelanguage_v1/model_service.rst, generativelanguage_v1/services_.rst, generativelanguage_v1/types_.rst, generativelanguage_v1beta/discuss_service.rst, generativelanguage_v1beta/generative_service.rst, generativelanguage_v1beta/model_service.rst, generativelanguage_v1beta/permission_service.rst, ..., generativelanguage_v1beta2/text_service.rst, generativelanguage_v1beta2/types_.rst, generativelanguage_v1beta3/discuss_service.rst, generativelanguage_v1beta3/model_service.rst, generativelanguage_v1beta3/permission_service.rst, generativelanguage_v1beta3/services_.rst, generativelanguage_v1beta3/text_service.rst, generativelanguage_v1beta3/types_.rst, index.rst, multiprocessing.rst\nloading intersphinx inventory from https://python.readthedocs.org/en/latest/objects.inv...\nloading intersphinx inventory from https://googleapis.dev/python/google-auth/latest/objects.inv...\nloading intersphinx inventory from https://googleapis.dev/python/google-api-core/latest/objects.inv...\nloading intersphinx inventory from https://grpc.github.io/grpc/python/objects.inv...\nloading intersphinx inventory from https://proto-plus-python.readthedocs.io/en/latest/objects.inv...\nloading intersphinx inventory from https://googleapis.dev/python/protobuf/latest/objects.inv...\nintersphinx inventory has moved: https://python.readthedocs.org/en/latest/objects.inv -> https://python.readthedocs.io/en/latest/objects.inv\nbuilding [mo]: targets for 0 po files that are out of date\nbuilding [html]: targets for 27 source files that are out of date\nupdating environment: [new config] 27 added, 0 changed, 0 removed\nreading sources... [ 3%] CHANGELOG\n/home/runner/work/google-cloud-python/google-cloud-python/packages/google-ai-generativelanguage/.nox/docs/lib/python3.10/site-packages/recommonmark/parser.py:75: UserWarning: Container node skipped: type=document\n warn(\"Container node skipped: type={0}\".format(mdnode.t))\nreading sources... [ 7%] README\nreading sources... [ 11%] generativelanguage_v1/generative_service\nreading sources... [ 14%] generativelanguage_v1/model_service\nreading sources... [ 18%] generativelanguage_v1/services_\nreading sources... [ 22%] generativelanguage_v1/types_\nreading sources... [ 25%] generativelanguage_v1beta/discuss_service\nreading sources... [ 29%] generativelanguage_v1beta/generative_service\nreading sources... [ 33%] generativelanguage_v1beta/model_service\nreading sources... [ 37%] generativelanguage_v1beta/permission_service\nreading sources... [ 40%] generativelanguage_v1beta/retriever_service\nreading sources... [ 44%] generativelanguage_v1beta/services_\nreading sources... [ 48%] generativelanguage_v1beta/text_service\nreading sources... [ 51%] generativelanguage_v1beta/types_\nreading sources... [ 55%] generativelanguage_v1beta2/discuss_service\nreading sources... [ 59%] generativelanguage_v1beta2/model_service\nreading sources... [ 62%] generativelanguage_v1beta2/services_\nreading sources... [ 66%] generativelanguage_v1beta2/text_service\nreading sources... [ 70%] generativelanguage_v1beta2/types_\nreading sources... [ 74%] generativelanguage_v1beta3/discuss_service\nreading sources... [ 77%] generativelanguage_v1beta3/model_service\nreading sources... [ 81%] generativelanguage_v1beta3/permission_service\nreading sources... [ 85%] generativelanguage_v1beta3/services_\nreading sources... [ 88%] generativelanguage_v1beta3/text_service\nreading sources... 
[ 92%] generativelanguage_v1beta3/types_\nreading sources... [ 96%] index\nreading sources... [100%] multiprocessing\n\n\nTraceback (most recent call last):\n File \"/home/runner/work/google-cloud-python/google-cloud-python/packages/google-ai-generativelanguage/.nox/docs/lib/python3.10/site-packages/sphinx/cmd/build.py\", line 276, in build_main\n app.build(args.force_all, filenames)\n File \"/home/runner/work/google-cloud-python/google-cloud-python/packages/google-ai-generativelanguage/.nox/docs/lib/python3.10/site-packages/sphinx/application.py\", line 330, in build\n self.builder.build_update()\n File \"/home/runner/work/google-cloud-python/google-cloud-python/packages/google-ai-generativelanguage/.nox/docs/lib/python3.10/site-packages/sphinx/builders/__init__.py\", line 286, in build_update\n self.build(to_build,\n File \"/home/runner/work/google-cloud-python/google-cloud-python/packages/google-ai-generativelanguage/.nox/docs/lib/python3.10/site-packages/sphinx/builders/__init__.py\", line 299, in build\n with logging.pending_warnings():\n File \"/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/contextlib.py\", line 142, in __exit__\n next(self.gen)\n File \"/home/runner/work/google-cloud-python/google-cloud-python/packages/google-ai-generativelanguage/.nox/docs/lib/python3.10/site-packages/sphinx/util/logging.py\", line 217, in pending_warnings\n memhandler.flushTo(logger)\n File \"/home/runner/work/google-cloud-python/google-cloud-python/packages/google-ai-generativelanguage/.nox/docs/lib/python3.10/site-packages/sphinx/util/logging.py\", line 182, in flushTo\n logger.handle(record)\n File \"/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/logging/__init__.py\", line 1634, in handle\n self.callHandlers(record)\n File \"/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/logging/__init__.py\", line 1696, in callHandlers\n hdlr.handle(record)\n File \"/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/logging/__init__.py\", line 964, in handle\n rv = self.filter(record)\n File \"/opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/logging/__init__.py\", line 821, in filter\n result = f.filter(record)\n File \"/home/runner/work/google-cloud-python/google-cloud-python/packages/google-ai-generativelanguage/.nox/docs/lib/python3.10/site-packages/sphinx/util/logging.py\", line 425, in filter\n raise exc\nsphinx.errors.SphinxWarning: /home/runner/work/google-cloud-python/google-cloud-python/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever.py:docstring of google.ai.generativelanguage_v1beta.types.retriever.Document:6:Inline emphasis start-string without end-string.\n\nWarning, treated as error:\n/home/runner/work/google-cloud-python/google-cloud-python/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever.py:docstring of google.ai.generativelanguage_v1beta.types.retriever.Document:6:Inline emphasis start-string without end-string.\nnox > Command sphinx-build -W -T -N -b html -d docs/_build/doctrees/ docs/ docs/_build/html/ failed with exit code 2\nnox > Session docs failed.\n~/work/google-cloud-python/google-cloud-python\nchecking changes with 'git diff --quiet origin/main... packages/google-analytics-admin/'\nno change detected in packages/google-analytics-admin/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-analytics-data/'\nno change detected in packages/google-analytics-data/, skipping\nchecking changes with 'git diff --quiet origin/main... 
packages/google-apps-meet/'\nno change detected in packages/google-apps-meet/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-apps-script-type/'\nno change detected in packages/google-apps-script-type/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-area120-tables/'\nno change detected in packages/google-area120-tables/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-access-approval/'\nno change detected in packages/google-cloud-access-approval/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-advisorynotifications/'\nno change detected in packages/google-cloud-advisorynotifications/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-alloydb-connectors/'\nno change detected in packages/google-cloud-alloydb-connectors/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-alloydb/'\nno change detected in packages/google-cloud-alloydb/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-api-gateway/'\nno change detected in packages/google-cloud-api-gateway/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-api-keys/'\nno change detected in packages/google-cloud-api-keys/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-apigee-connect/'\nno change detected in packages/google-cloud-apigee-connect/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-apigee-registry/'\nno change detected in packages/google-cloud-apigee-registry/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-appengine-admin/'\nno change detected in packages/google-cloud-appengine-admin/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-appengine-logging/'\nno change detected in packages/google-cloud-appengine-logging/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-artifact-registry/'\nno change detected in packages/google-cloud-artifact-registry/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-asset/'\nno change detected in packages/google-cloud-asset/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-assured-workloads/'\nno change detected in packages/google-cloud-assured-workloads/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-automl/'\nno change detected in packages/google-cloud-automl/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-bare-metal-solution/'\nno change detected in packages/google-cloud-bare-metal-solution/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-batch/'\nno change detected in packages/google-cloud-batch/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-beyondcorp-appconnections/'\nno change detected in packages/google-cloud-beyondcorp-appconnections/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-beyondcorp-appconnectors/'\nno change detected in packages/google-cloud-beyondcorp-appconnectors/, skipping\nchecking changes with 'git diff --quiet origin/main... 
packages/google-cloud-beyondcorp-appgateways/'\nno change detected in packages/google-cloud-beyondcorp-appgateways/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-beyondcorp-clientconnectorservices/'\nno change detected in packages/google-cloud-beyondcorp-clientconnectorservices/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-beyondcorp-clientgateways/'\nno change detected in packages/google-cloud-beyondcorp-clientgateways/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-bigquery-analyticshub/'\nno change detected in packages/google-cloud-bigquery-analyticshub/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-bigquery-biglake/'\nno change detected in packages/google-cloud-bigquery-biglake/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-bigquery-connection/'\nno change detected in packages/google-cloud-bigquery-connection/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-bigquery-data-exchange/'\nno change detected in packages/google-cloud-bigquery-data-exchange/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-bigquery-datapolicies/'\nno change detected in packages/google-cloud-bigquery-datapolicies/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-bigquery-datatransfer/'\nno change detected in packages/google-cloud-bigquery-datatransfer/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-bigquery-logging/'\nno change detected in packages/google-cloud-bigquery-logging/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-bigquery-migration/'\nno change detected in packages/google-cloud-bigquery-migration/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-bigquery-reservation/'\nno change detected in packages/google-cloud-bigquery-reservation/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-billing-budgets/'\nno change detected in packages/google-cloud-billing-budgets/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-billing/'\nno change detected in packages/google-cloud-billing/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-binary-authorization/'\nno change detected in packages/google-cloud-binary-authorization/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-build/'\nno change detected in packages/google-cloud-build/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-certificate-manager/'\nno change detected in packages/google-cloud-certificate-manager/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-channel/'\nno change detected in packages/google-cloud-channel/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-cloudquotas/'\nno change detected in packages/google-cloud-cloudquotas/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-commerce-consumer-procurement/'\nno change detected in packages/google-cloud-commerce-consumer-procurement/, skipping\nchecking changes with 'git diff --quiet origin/main... 
packages/google-cloud-common/'\nno change detected in packages/google-cloud-common/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-compute/'\nno change detected in packages/google-cloud-compute/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-confidentialcomputing/'\nno change detected in packages/google-cloud-confidentialcomputing/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-config/'\nno change detected in packages/google-cloud-config/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-contact-center-insights/'\nno change detected in packages/google-cloud-contact-center-insights/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-container/'\nno change detected in packages/google-cloud-container/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-containeranalysis/'\nno change detected in packages/google-cloud-containeranalysis/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-contentwarehouse/'\nno change detected in packages/google-cloud-contentwarehouse/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-data-fusion/'\nno change detected in packages/google-cloud-data-fusion/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-data-qna/'\nno change detected in packages/google-cloud-data-qna/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-datacatalog-lineage/'\nno change detected in packages/google-cloud-datacatalog-lineage/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-datacatalog/'\nno change detected in packages/google-cloud-datacatalog/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-dataflow-client/'\nno change detected in packages/google-cloud-dataflow-client/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-dataform/'\nno change detected in packages/google-cloud-dataform/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-datalabeling/'\nno change detected in packages/google-cloud-datalabeling/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-dataplex/'\nno change detected in packages/google-cloud-dataplex/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-dataproc-metastore/'\nno change detected in packages/google-cloud-dataproc-metastore/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-dataproc/'\nno change detected in packages/google-cloud-dataproc/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-datastream/'\nno change detected in packages/google-cloud-datastream/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-deploy/'\nno change detected in packages/google-cloud-deploy/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-dialogflow-cx/'\nno change detected in packages/google-cloud-dialogflow-cx/, skipping\nchecking changes with 'git diff --quiet origin/main... 
packages/google-cloud-dialogflow/'\nno change detected in packages/google-cloud-dialogflow/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-discoveryengine/'\nno change detected in packages/google-cloud-discoveryengine/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-dlp/'\nno change detected in packages/google-cloud-dlp/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-dms/'\nno change detected in packages/google-cloud-dms/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-documentai/'\nno change detected in packages/google-cloud-documentai/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-domains/'\nno change detected in packages/google-cloud-domains/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-edgecontainer/'\nno change detected in packages/google-cloud-edgecontainer/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-edgenetwork/'\nno change detected in packages/google-cloud-edgenetwork/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-enterpriseknowledgegraph/'\nno change detected in packages/google-cloud-enterpriseknowledgegraph/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-essential-contacts/'\nno change detected in packages/google-cloud-essential-contacts/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-eventarc-publishing/'\nno change detected in packages/google-cloud-eventarc-publishing/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-eventarc/'\nno change detected in packages/google-cloud-eventarc/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-filestore/'\nno change detected in packages/google-cloud-filestore/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-functions/'\nno change detected in packages/google-cloud-functions/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-gke-backup/'\nno change detected in packages/google-cloud-gke-backup/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-gke-connect-gateway/'\nno change detected in packages/google-cloud-gke-connect-gateway/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-gke-hub/'\nno change detected in packages/google-cloud-gke-hub/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-gke-multicloud/'\nno change detected in packages/google-cloud-gke-multicloud/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-gsuiteaddons/'\nno change detected in packages/google-cloud-gsuiteaddons/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-iam-logging/'\nno change detected in packages/google-cloud-iam-logging/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-iam/'\nno change detected in packages/google-cloud-iam/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-iap/'\nno change detected in packages/google-cloud-iap/, skipping\nchecking changes with 'git diff --quiet origin/main... 
packages/google-cloud-ids/'\nno change detected in packages/google-cloud-ids/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-kms-inventory/'\nno change detected in packages/google-cloud-kms-inventory/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-kms/'\nno change detected in packages/google-cloud-kms/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-language/'\nno change detected in packages/google-cloud-language/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-life-sciences/'\nno change detected in packages/google-cloud-life-sciences/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-managed-identities/'\nno change detected in packages/google-cloud-managed-identities/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-media-translation/'\nno change detected in packages/google-cloud-media-translation/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-memcache/'\nno change detected in packages/google-cloud-memcache/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-migrationcenter/'\nno change detected in packages/google-cloud-migrationcenter/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-monitoring-dashboards/'\nno change detected in packages/google-cloud-monitoring-dashboards/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-monitoring-metrics-scopes/'\nno change detected in packages/google-cloud-monitoring-metrics-scopes/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-monitoring/'\nno change detected in packages/google-cloud-monitoring/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-netapp/'\nno change detected in packages/google-cloud-netapp/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-network-connectivity/'\nno change detected in packages/google-cloud-network-connectivity/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-network-management/'\nno change detected in packages/google-cloud-network-management/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-network-security/'\nno change detected in packages/google-cloud-network-security/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-network-services/'\nno change detected in packages/google-cloud-network-services/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-notebooks/'\nno change detected in packages/google-cloud-notebooks/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-optimization/'\nno change detected in packages/google-cloud-optimization/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-orchestration-airflow/'\nno change detected in packages/google-cloud-orchestration-airflow/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-os-config/'\nno change detected in packages/google-cloud-os-config/, skipping\nchecking changes with 'git diff --quiet origin/main... 
packages/google-cloud-os-login/'\nno change detected in packages/google-cloud-os-login/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-phishing-protection/'\nno change detected in packages/google-cloud-phishing-protection/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-policy-troubleshooter/'\nno change detected in packages/google-cloud-policy-troubleshooter/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-policysimulator/'\nno change detected in packages/google-cloud-policysimulator/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-policytroubleshooter-iam/'\nno change detected in packages/google-cloud-policytroubleshooter-iam/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-private-ca/'\nno change detected in packages/google-cloud-private-ca/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-private-catalog/'\nno change detected in packages/google-cloud-private-catalog/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-public-ca/'\nno change detected in packages/google-cloud-public-ca/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-rapidmigrationassessment/'\nno change detected in packages/google-cloud-rapidmigrationassessment/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-recaptcha-enterprise/'\nno change detected in packages/google-cloud-recaptcha-enterprise/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-recommendations-ai/'\nno change detected in packages/google-cloud-recommendations-ai/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-recommender/'\nno change detected in packages/google-cloud-recommender/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-redis-cluster/'\nno change detected in packages/google-cloud-redis-cluster/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-redis/'\nno change detected in packages/google-cloud-redis/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-resource-manager/'\nno change detected in packages/google-cloud-resource-manager/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-resource-settings/'\nno change detected in packages/google-cloud-resource-settings/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-retail/'\nno change detected in packages/google-cloud-retail/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-run/'\nno change detected in packages/google-cloud-run/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-scheduler/'\nno change detected in packages/google-cloud-scheduler/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-secret-manager/'\nno change detected in packages/google-cloud-secret-manager/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-securesourcemanager/'\nno change detected in packages/google-cloud-securesourcemanager/, skipping\nchecking changes with 'git diff --quiet origin/main... 
packages/google-cloud-securitycenter/'\nno change detected in packages/google-cloud-securitycenter/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-securitycentermanagement/'\nno change detected in packages/google-cloud-securitycentermanagement/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-service-control/'\nno change detected in packages/google-cloud-service-control/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-service-directory/'\nno change detected in packages/google-cloud-service-directory/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-service-management/'\nno change detected in packages/google-cloud-service-management/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-service-usage/'\nno change detected in packages/google-cloud-service-usage/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-servicehealth/'\nno change detected in packages/google-cloud-servicehealth/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-shell/'\nno change detected in packages/google-cloud-shell/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-source-context/'\nno change detected in packages/google-cloud-source-context/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-speech/'\nno change detected in packages/google-cloud-speech/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-storage-transfer/'\nno change detected in packages/google-cloud-storage-transfer/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-storageinsights/'\nno change detected in packages/google-cloud-storageinsights/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-support/'\nno change detected in packages/google-cloud-support/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-talent/'\nno change detected in packages/google-cloud-talent/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-tasks/'\nno change detected in packages/google-cloud-tasks/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-telcoautomation/'\nno change detected in packages/google-cloud-telcoautomation/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-texttospeech/'\nno change detected in packages/google-cloud-texttospeech/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-tpu/'\nno change detected in packages/google-cloud-tpu/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-trace/'\nno change detected in packages/google-cloud-trace/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-translate/'\nno change detected in packages/google-cloud-translate/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-video-live-stream/'\nno change detected in packages/google-cloud-video-live-stream/, skipping\nchecking changes with 'git diff --quiet origin/main... 
packages/google-cloud-video-stitcher/'\nno change detected in packages/google-cloud-video-stitcher/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-video-transcoder/'\nno change detected in packages/google-cloud-video-transcoder/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-videointelligence/'\nno change detected in packages/google-cloud-videointelligence/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-vision/'\nno change detected in packages/google-cloud-vision/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-vm-migration/'\nno change detected in packages/google-cloud-vm-migration/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-vmwareengine/'\nno change detected in packages/google-cloud-vmwareengine/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-vpc-access/'\nno change detected in packages/google-cloud-vpc-access/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-webrisk/'\nno change detected in packages/google-cloud-webrisk/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-websecurityscanner/'\nno change detected in packages/google-cloud-websecurityscanner/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-workflows/'\nno change detected in packages/google-cloud-workflows/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-cloud-workstations/'\nno change detected in packages/google-cloud-workstations/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-geo-type/'\nno change detected in packages/google-geo-type/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-maps-addressvalidation/'\nno change detected in packages/google-maps-addressvalidation/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-maps-fleetengine-delivery/'\nno change detected in packages/google-maps-fleetengine-delivery/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-maps-fleetengine/'\nno change detected in packages/google-maps-fleetengine/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-maps-mapsplatformdatasets/'\nno change detected in packages/google-maps-mapsplatformdatasets/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-maps-places/'\nno change detected in packages/google-maps-places/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-maps-routing/'\nno change detected in packages/google-maps-routing/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-shopping-css/'\nno change detected in packages/google-shopping-css/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-shopping-merchant-inventories/'\nno change detected in packages/google-shopping-merchant-inventories/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/google-shopping-merchant-reports/'\nno change detected in packages/google-shopping-merchant-reports/, skipping\nchecking changes with 'git diff --quiet origin/main... 
packages/google-shopping-type/'\nno change detected in packages/google-shopping-type/, skipping\nchecking changes with 'git diff --quiet origin/main... packages/grafeas/'\nno change detected in packages/grafeas/, skipping\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever.py\nindex c00ad1b89194..3af2c6a1a58c 100644\n--- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever.py\n+++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever.py\n@@ -88,7 +88,7 @@ class Document(proto.Message):\n Attributes:\n name (str):\n Immutable. Identifier. The ``Document`` resource name. The\n- ID (name excluding the \"corpora/*/documents/\" prefix) can\n+ ID (name excluding the `corpora/*/documents/` prefix) can\n contain up to 40 characters that are lowercase alphanumeric\n or dashes (-). The ID cannot start or end with a dash. If\n the name is empty on create, a unique name will be derived\n@@ -315,7 +315,7 @@ class Chunk(proto.Message):\n Attributes:\n name (str):\n Immutable. Identifier. The ``Chunk`` resource name. The ID\n- (name excluding the \"corpora/*/documents/*/chunks/\" prefix)\n+ (name excluding the `corpora/*/documents/*/chunks/` prefix)\n can contain up to 40 characters that are lowercase\n alphanumeric or dashes (-). The ID cannot start or end with\n a dash. If the name is empty on create, a random\ndiff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py\nindex 1a80d83885de..28d0b0f12ee5 100644\n--- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py\n+++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py\n@@ -305,11 +305,11 @@ class PermissionServiceAsyncClient:\n role is a superset of the previous\n role's permitted operations:\n \n- - reader can use the resource (e.g.\n+ - reader can use the resource (e.g.\n tuned model) for inference\n- - writer has reader's permissions and\n+ - writer has reader's permissions and\n additionally can edit and share\n- - owner has writer's permissions and\n+ - owner has writer's permissions and\n additionally can delete\n \n \"\"\"\n@@ -432,11 +432,11 @@ class PermissionServiceAsyncClient:\n role is a superset of the previous\n role's permitted operations:\n \n- - reader can use the resource (e.g.\n+ - reader can use the resource (e.g.\n tuned model) for inference\n- - writer has reader's permissions and\n+ - writer has reader's permissions and\n additionally can edit and share\n- - owner has writer's permissions and\n+ - owner has writer's permissions and\n additionally can delete\n \n \"\"\"\n@@ -682,11 +682,11 @@ class PermissionServiceAsyncClient:\n role is a superset of the previous\n role's permitted operations:\n \n- - reader can use the resource (e.g.\n+ - reader can use the resource (e.g.\n tuned model) for inference\n- - writer has reader's permissions and\n+ - writer has reader's permissions and\n additionally can edit and share\n- - owner has writer's permissions and\n+ - owner has writer's permissions and\n additionally can delete\n 
\n \"\"\"\ndiff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py\nindex 78bbe681b0cc..9afdb7375e5e 100644\n--- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py\n+++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py\n@@ -542,11 +542,11 @@ class PermissionServiceClient(metaclass=PermissionServiceClientMeta):\n role is a superset of the previous\n role's permitted operations:\n \n- - reader can use the resource (e.g.\n+ - reader can use the resource (e.g.\n tuned model) for inference\n- - writer has reader's permissions and\n+ - writer has reader's permissions and\n additionally can edit and share\n- - owner has writer's permissions and\n+ - owner has writer's permissions and\n additionally can delete\n \n \"\"\"\n@@ -669,11 +669,11 @@ class PermissionServiceClient(metaclass=PermissionServiceClientMeta):\n role is a superset of the previous\n role's permitted operations:\n \n- - reader can use the resource (e.g.\n+ - reader can use the resource (e.g.\n tuned model) for inference\n- - writer has reader's permissions and\n+ - writer has reader's permissions and\n additionally can edit and share\n- - owner has writer's permissions and\n+ - owner has writer's permissions and\n additionally can delete\n \n \"\"\"\n@@ -919,11 +919,11 @@ class PermissionServiceClient(metaclass=PermissionServiceClientMeta):\n role is a superset of the previous\n role's permitted operations:\n \n- - reader can use the resource (e.g.\n+ - reader can use the resource (e.g.\n tuned model) for inference\n- - writer has reader's permissions and\n+ - writer has reader's permissions and\n additionally can edit and share\n- - owner has writer's permissions and\n+ - owner has writer's permissions and\n additionally can delete\n \n \"\"\"\ndiff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py\nindex 12af3b148a45..352dfe0983f9 100644\n--- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py\n+++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py\n@@ -395,11 +395,11 @@ class PermissionServiceRestTransport(PermissionServiceTransport):\n role is a superset of the previous\n role's permitted operations:\n \n- - reader can use the resource (e.g.\n+ - reader can use the resource (e.g.\n tuned model) for inference\n- - writer has reader's permissions and\n+ - writer has reader's permissions and\n additionally can edit and share\n- - owner has writer's permissions and\n+ - owner has writer's permissions and\n additionally can delete\n \n \"\"\"\n@@ -592,11 +592,11 @@ class PermissionServiceRestTransport(PermissionServiceTransport):\n role is a superset of the previous\n role's permitted operations:\n \n- - reader can use the resource (e.g.\n+ - reader can use the resource (e.g.\n tuned model) for inference\n- - writer has reader's permissions and\n+ - writer has reader's permissions and\n additionally can edit and share\n- - owner has writer's permissions and\n+ - owner has writer's 
permissions and\n additionally can delete\n \n \"\"\"\n@@ -891,11 +891,11 @@ class PermissionServiceRestTransport(PermissionServiceTransport):\n role is a superset of the previous\n role's permitted operations:\n \n- - reader can use the resource (e.g.\n+ - reader can use the resource (e.g.\n tuned model) for inference\n- - writer has reader's permissions and\n+ - writer has reader's permissions and\n additionally can edit and share\n- - owner has writer's permissions and\n+ - owner has writer's permissions and\n additionally can delete\n \n \"\"\"\ndiff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/types/permission.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/types/permission.py\nindex 115ca22e8bef..09af2311c4ed 100644\n--- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/types/permission.py\n+++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/types/permission.py\n@@ -41,10 +41,10 @@ class Permission(proto.Message):\n There are three concentric roles. Each role is a superset of the\n previous role's permitted operations:\n \n- - reader can use the resource (e.g. tuned model) for inference\n- - writer has reader's permissions and additionally can edit and\n+ - reader can use the resource (e.g. tuned model) for inference\n+ - writer has reader's permissions and additionally can edit and\n share\n- - owner has writer's permissions and additionally can delete\n+ - owner has writer's permissions and additionally can delete\n \n \n .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields\n", "difficulty": 0, "changed_files": ["packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/retriever.py", "packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/async_client.py", "packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py", "packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py", "packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/types/permission.py"], "commit_link": "https://github.com/googleapis/google-cloud-python/tree/99ad8a351bb884f1e398c1d85c62d6b6e0bdd67e"}
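The failure recorded in this datapoint is an RST problem rather than a Python one: docutils treats the bare asterisk in the docstring text "corpora/*/documents/" as an emphasis start-string (a "/" is one of the characters allowed to precede an opener), finds no matching end-string, and emits a warning; the docs session runs sphinx-build with -W, which promotes that warning into the build error in the traceback. The diff above fixes it by putting the glob in backticks so the asterisk is no longer inline markup, and by re-spacing the "- reader / - writer / - owner" bullets so they parse as a list. Below is a minimal standalone sketch of the failure mode; it assumes only that docutils (a Sphinx dependency) is installed, and the bad/good strings are illustrative paraphrases, not verbatim package docstrings.

    from docutils.core import publish_doctree

    bad = 'The ID (name excluding the "corpora/*/documents/" prefix) may hold 40 characters.'
    good = 'The ID (name excluding the ``corpora/*/documents/`` prefix) may hold 40 characters.'

    # `bad` reports: Inline emphasis start-string without end-string.
    # Under sphinx-build -W the same warning becomes a hard error, as in the log above.
    publish_doctree(bad)

    # Double backticks turn the glob into an inline literal, so the "*" is plain text.
    # (The recorded fix uses single backticks, which also stops the emphasis parse.)
    publish_doctree(good)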
data/python/9e1aa7b.json ADDED
The diff for this file is too large to render. See raw diff
 
data/python/aa8a42b.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 74, "repo_owner": "encode", "repo_name": "httpx", "head_branch": "cookie-persistence-option", "workflow_name": "Test Suite", "workflow_filename": "test-suite.yml", "workflow_path": ".github/workflows/test-suite.yml", "contributor": "MarkWine", "sha_fail": "aa8a42bcf03f3b89575a9cce2f8af715a5121c59", "sha_success": "841f11c1a93299b9032d18d0bb5d112421149336", "workflow": "---\nname: Test Suite\n\non:\n push:\n branches: [\"master\"]\n pull_request:\n branches: [\"master\"]\n\njobs:\n tests:\n name: \"Python ${{ matrix.python-version }}\"\n runs-on: \"ubuntu-latest\"\n\n strategy:\n matrix:\n python-version: [\"3.8\", \"3.9\", \"3.10\", \"3.11\", \"3.12\"]\n\n steps:\n - uses: \"actions/checkout@v4\"\n - uses: \"actions/setup-python@v4\"\n with:\n python-version: \"${{ matrix.python-version }}\"\n allow-prereleases: true\n - name: \"Install dependencies\"\n run: \"scripts/install\"\n - name: \"Run linting checks\"\n run: \"scripts/check\"\n - name: \"Build package & docs\"\n run: \"scripts/build\"\n - name: \"Run tests\"\n run: \"scripts/test\"\n - name: \"Enforce coverage\"\n run: \"scripts/coverage\"\n", "logs": [{"step_name": "Python 3.8/7_Run tests.txt", "log": "##[group]Run scripts/test\n\u001b[36;1mscripts/test\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\n+ [ -z true ]\n+ coverage run -m pytest\n============================= test session starts ==============================\nplatform linux -- Python 3.8.18, pytest-7.4.4, pluggy-1.3.0\nrootdir: /home/runner/work/httpx/httpx\nconfigfile: pyproject.toml\nplugins: anyio-4.2.0\ncollected 855 items\n\ntests/test_api.py ........... [ 1%]\ntests/test_asgi.py ........................ [ 4%]\ntests/test_auth.py ........ [ 5%]\ntests/test_config.py ............................... [ 8%]\ntests/test_content.py ........................................ [ 13%]\ntests/test_decoders.py ................................... [ 17%]\ntests/test_exceptions.py ... [ 17%]\ntests/test_exported_members.py . [ 17%]\ntests/test_main.py ........... [ 19%]\ntests/test_multipart.py ...................................... [ 23%]\ntests/test_status_codes.py ...... [ 24%]\ntests/test_timeouts.py ........ [ 25%]\ntests/test_utils.py .................................................... [ 31%]\n.... [ 31%]\ntests/test_wsgi.py ............ [ 33%]\ntests/client/test_async_client.py ...................................... [ 37%]\n.............. [ 39%]\ntests/client/test_auth.py .............................................. [ 44%]\n..........................s........ [ 48%]\ntests/client/test_client.py ................................... [ 52%]\ntests/client/test_cookies.py .....FF. [ 53%]\ntests/client/test_event_hooks.py ......... [ 54%]\ntests/client/test_headers.py ........... [ 56%]\ntests/client/test_properties.py ........ [ 57%]\ntests/client/test_proxies.py ........................................... [ 62%]\n.................................... [ 66%]\ntests/client/test_queryparams.py ... [ 66%]\ntests/client/test_redirects.py .............................F. [ 70%]\ntests/models/test_cookies.py ....... [ 71%]\ntests/models/test_headers.py .................. 
[ 73%]\ntests/models/test_queryparams.py .............. [ 74%]\ntests/models/test_requests.py ....................... [ 77%]\ntests/models/test_responses.py ......................................... [ 82%]\n................................................................. [ 89%]\ntests/models/test_url.py ............................................... [ 95%]\n....................................... [100%]\n\n=================================== FAILURES ===================================\n_______________________________ test_get_cookie ________________________________\n\n def test_get_cookie() -> None:\n url = \"http://example.org/set_cookie\"\n \n client = httpx.Client(transport=httpx.MockTransport(get_and_set_cookies))\n response = client.get(url)\n \n assert response.status_code == 200\n assert response.cookies[\"example-name\"] == \"example-value\"\n> assert client.cookies[\"example-name\"] == \"example-value\"\n\ntests/client/test_cookies.py:148: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\nself = <Cookies[]>, name = 'example-name'\n\n def __getitem__(self, name: str) -> str:\n value = self.get(name)\n if value is None:\n> raise KeyError(name)\nE KeyError: 'example-name'\n\nhttpx/_models.py:1154: KeyError\n___________________________ test_cookie_persistence ____________________________\n\n def test_cookie_persistence() -> None:\n \"\"\"\n Ensure that Client instances persist cookies between requests.\n \"\"\"\n client = httpx.Client(transport=httpx.MockTransport(get_and_set_cookies))\n \n response = client.get(\"http://example.org/echo_cookies\")\n assert response.status_code == 200\n assert response.json() == {\"cookies\": None}\n \n response = client.get(\"http://example.org/set_cookie\")\n assert response.status_code == 200\n assert response.cookies[\"example-name\"] == \"example-value\"\n> assert client.cookies[\"example-name\"] == \"example-value\"\n\ntests/client/test_cookies.py:164: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\nself = <Cookies[]>, name = 'example-name'\n\n def __getitem__(self, name: str) -> str:\n value = self.get(name)\n if value is None:\n> raise KeyError(name)\nE KeyError: 'example-name'\n\nhttpx/_models.py:1154: KeyError\n________________________ test_redirect_cookie_behavior _________________________\n\n def test_redirect_cookie_behavior():\n client = httpx.Client(\n transport=httpx.MockTransport(cookie_sessions), follow_redirects=True\n )\n \n # The client is not logged in.\n response = client.get(\"https://example.com/\")\n assert response.url == \"https://example.com/\"\n assert response.text == \"Not logged in\"\n \n # Login redirects to the homepage, setting a session cookie.\n response = client.post(\"https://example.com/login\")\n assert response.url == \"https://example.com/\"\n> assert response.text == \"Logged in\"\nE AssertionError: assert 'Not logged in' == 'Logged in'\nE - Logged in\nE + Not logged in\n\ntests/client/test_redirects.py:416: AssertionError\n=========================== short test summary info ============================\nSKIPPED [1] tests/client/test_auth.py:272: netrc files without a password are invalid with Python < 3.11\n================== 3 failed, 851 passed, 1 skipped in 17.34s ===================\n##[error]Process completed with exit code 1.\n"}, {"step_name": "Python 3.11/7_Run tests.txt", "log": "##[group]Run scripts/test\n\u001b[36;1mscripts/test\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: 
/opt/hostedtoolcache/Python/3.11.7/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.11.7/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.11.7/x64/lib\n##[endgroup]\n+ [ -z true ]\n+ coverage run -m pytest\n============================= test session starts ==============================\nplatform linux -- Python 3.11.7, pytest-7.4.4, pluggy-1.3.0\nrootdir: /home/runner/work/httpx/httpx\nconfigfile: pyproject.toml\nplugins: anyio-4.2.0\ncollected 855 items\n\ntests/test_api.py ........... [ 1%]\ntests/test_asgi.py ........................ [ 4%]\ntests/test_auth.py ........ [ 5%]\ntests/test_config.py ............................... [ 8%]\ntests/test_content.py ........................................ [ 13%]\ntests/test_decoders.py ................................... [ 17%]\ntests/test_exceptions.py ... [ 17%]\ntests/test_exported_members.py . [ 17%]\ntests/test_main.py ........... [ 19%]\ntests/test_multipart.py ...................................... [ 23%]\ntests/test_status_codes.py ...... [ 24%]\ntests/test_timeouts.py ........ [ 25%]\ntests/test_utils.py .................................................... [ 31%]\n.... [ 31%]\ntests/test_wsgi.py ............ [ 33%]\ntests/client/test_async_client.py ...................................... [ 37%]\n.............. [ 39%]\ntests/client/test_auth.py .............................................. [ 44%]\n...........................s....... [ 48%]\ntests/client/test_client.py ................................... [ 52%]\ntests/client/test_cookies.py .....FF. [ 53%]\ntests/client/test_event_hooks.py ......... [ 54%]\ntests/client/test_headers.py ........... [ 56%]\ntests/client/test_properties.py ........ [ 57%]\ntests/client/test_proxies.py ........................................... [ 62%]\n.................................... [ 66%]\ntests/client/test_queryparams.py ... [ 66%]\ntests/client/test_redirects.py .............................F. [ 70%]\ntests/models/test_cookies.py ....... [ 71%]\ntests/models/test_headers.py .................. [ 73%]\ntests/models/test_queryparams.py .............. [ 74%]\ntests/models/test_requests.py ....................... [ 77%]\ntests/models/test_responses.py ......................................... [ 82%]\n................................................................. [ 89%]\ntests/models/test_url.py ............................................... [ 95%]\n....................................... 
[100%]\n\n=================================== FAILURES ===================================\n_______________________________ test_get_cookie ________________________________\n\n def test_get_cookie() -> None:\n url = \"http://example.org/set_cookie\"\n \n client = httpx.Client(transport=httpx.MockTransport(get_and_set_cookies))\n response = client.get(url)\n \n assert response.status_code == 200\n assert response.cookies[\"example-name\"] == \"example-value\"\n> assert client.cookies[\"example-name\"] == \"example-value\"\n\ntests/client/test_cookies.py:148: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\nself = <Cookies[]>, name = 'example-name'\n\n def __getitem__(self, name: str) -> str:\n value = self.get(name)\n if value is None:\n> raise KeyError(name)\nE KeyError: 'example-name'\n\nhttpx/_models.py:1154: KeyError\n___________________________ test_cookie_persistence ____________________________\n\n def test_cookie_persistence() -> None:\n \"\"\"\n Ensure that Client instances persist cookies between requests.\n \"\"\"\n client = httpx.Client(transport=httpx.MockTransport(get_and_set_cookies))\n \n response = client.get(\"http://example.org/echo_cookies\")\n assert response.status_code == 200\n assert response.json() == {\"cookies\": None}\n \n response = client.get(\"http://example.org/set_cookie\")\n assert response.status_code == 200\n assert response.cookies[\"example-name\"] == \"example-value\"\n> assert client.cookies[\"example-name\"] == \"example-value\"\n\ntests/client/test_cookies.py:164: \n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ \n\nself = <Cookies[]>, name = 'example-name'\n\n def __getitem__(self, name: str) -> str:\n value = self.get(name)\n if value is None:\n> raise KeyError(name)\nE KeyError: 'example-name'\n\nhttpx/_models.py:1154: KeyError\n________________________ test_redirect_cookie_behavior _________________________\n\n def test_redirect_cookie_behavior():\n client = httpx.Client(\n transport=httpx.MockTransport(cookie_sessions), follow_redirects=True\n )\n \n # The client is not logged in.\n response = client.get(\"https://example.com/\")\n assert response.url == \"https://example.com/\"\n assert response.text == \"Not logged in\"\n \n # Login redirects to the homepage, setting a session cookie.\n response = client.post(\"https://example.com/login\")\n assert response.url == \"https://example.com/\"\n> assert response.text == \"Logged in\"\nE AssertionError: assert 'Not logged in' == 'Logged in'\nE - Logged in\nE + Not logged in\n\ntests/client/test_redirects.py:416: AssertionError\n=========================== short test summary info ============================\nSKIPPED [1] tests/client/test_auth.py:295: netrc files without a password are valid from Python >= 3.11\n================== 3 failed, 851 passed, 1 skipped in 16.79s ===================\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/httpx/_client.py b/httpx/_client.py\nindex 0a7490d..113eb47 100644\n--- a/httpx/_client.py\n+++ b/httpx/_client.py\n@@ -164,7 +164,7 @@ class BaseClient:\n params: typing.Optional[QueryParamTypes] = None,\n headers: typing.Optional[HeaderTypes] = None,\n cookies: typing.Optional[CookieTypes] = None,\n- persistent_cookies: bool = False,\n+ persistent_cookies: bool = True,\n timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,\n follow_redirects: bool = False,\n max_redirects: int = DEFAULT_MAX_REDIRECTS,\n@@ -630,7 +630,7 @@ class Client(BaseClient):\n params: 
typing.Optional[QueryParamTypes] = None,\n headers: typing.Optional[HeaderTypes] = None,\n cookies: typing.Optional[CookieTypes] = None,\n- persistent_cookies: bool = False,\n+ persistent_cookies: bool = True,\n verify: VerifyTypes = True,\n cert: typing.Optional[CertTypes] = None,\n http1: bool = True,\n@@ -1375,7 +1375,7 @@ class AsyncClient(BaseClient):\n params: typing.Optional[QueryParamTypes] = None,\n headers: typing.Optional[HeaderTypes] = None,\n cookies: typing.Optional[CookieTypes] = None,\n- persistent_cookies: bool = False,\n+ persistent_cookies: bool = True,\n verify: VerifyTypes = True,\n cert: typing.Optional[CertTypes] = None,\n http1: bool = True,\n", "difficulty": 3, "changed_files": ["httpx/_client.py"], "commit_link": "https://github.com/encode/httpx/tree/aa8a42bcf03f3b89575a9cce2f8af715a5121c59"}
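All three failures in this datapoint pin down the same contract: an httpx.Client stores cookies set by responses and replays them on later requests. The head commit introduced a persistent_cookies option defaulting to False, which silently disabled that behavior; the success commit keeps the option but defaults it to True in BaseClient, Client, and AsyncClient. Below is a minimal sketch of the contract under test, assuming httpx is installed; the handler and URLs are illustrative stand-ins for the suite's get_and_set_cookies fixture.

    import httpx

    def handler(request: httpx.Request) -> httpx.Response:
        if request.url.path == "/set_cookie":
            return httpx.Response(200, headers={"Set-Cookie": "example-name=example-value"})
        # Echo whatever Cookie header arrived, so persistence shows up in the body.
        return httpx.Response(200, json={"cookies": request.headers.get("Cookie")})

    client = httpx.Client(transport=httpx.MockTransport(handler))
    client.get("http://example.org/set_cookie")         # the response sets the cookie ...
    response = client.get("http://example.org/echo")    # ... and the client replays it

    assert client.cookies["example-name"] == "example-value"
    assert response.json() == {"cookies": "example-name=example-value"}

With the failing default (persistent_cookies=False) the client never retains the cookie, so the first assert raises KeyError, which is exactly the failure shown in the logs.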
data/python/ac842d4.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 39, "repo_owner": "qtile", "repo_name": "qtile", "head_branch": "master", "workflow_name": "Run pre-commit", "workflow_filename": "pre_commit.yml", "workflow_path": ".github/workflows/pre_commit.yml", "contributor": "qtile", "sha_fail": "ac842d4dfb46538e2e59e77a0d52080a153df886", "sha_success": "5795ce0bf3300d05a871d466c973f47308beba45", "workflow": "name: Run pre-commit\n\non:\n push:\n pull_request:\n\njobs:\n check:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - uses: actions/setup-python@v4\n - name: Install dependencies\n run: |\n sudo apt update\n sudo apt install --no-install-recommends libxkbcommon-dev\n - uses: pre-commit/[email protected]\n", "logs": [{"step_name": "check/5_Run [email protected]", "log": "##[group]Run pre-commit/[email protected]\nwith:\n extra_args: --all-files\n##[endgroup]\n##[group]Run python -m pip install pre-commit\n\u001b[36;1mpython -m pip install pre-commit\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\n##[endgroup]\nDefaulting to user installation because normal site-packages is not writeable\nCollecting pre-commit\n Downloading pre_commit-3.5.0-py2.py3-none-any.whl (203 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 203.7/203.7 KB 3.9 MB/s eta 0:00:00\nCollecting identify>=1.0.0\n Downloading identify-2.5.31-py2.py3-none-any.whl (98 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 98.9/98.9 KB 9.1 MB/s eta 0:00:00\nCollecting cfgv>=2.0.0\n Downloading cfgv-3.4.0-py2.py3-none-any.whl (7.2 kB)\nRequirement already satisfied: pyyaml>=5.1 in /usr/lib/python3/dist-packages (from pre-commit) (5.4.1)\nCollecting virtualenv>=20.10.0\n Downloading virtualenv-20.24.6-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 21.1 MB/s eta 0:00:00\nCollecting nodeenv>=0.11.1\n Downloading nodeenv-1.8.0-py2.py3-none-any.whl (22 kB)\nRequirement already satisfied: setuptools in /usr/lib/python3/dist-packages (from nodeenv>=0.11.1->pre-commit) (59.6.0)\nCollecting distlib<1,>=0.3.7\n Downloading distlib-0.3.7-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 KB 37.7 MB/s eta 0:00:00\nCollecting platformdirs<4,>=3.9.1\n Downloading platformdirs-3.11.0-py3-none-any.whl (17 kB)\nCollecting filelock<4,>=3.12.2\n Downloading filelock-3.13.1-py3-none-any.whl (11 kB)\nInstalling collected packages: distlib, platformdirs, nodeenv, identify, filelock, cfgv, virtualenv, pre-commit\nSuccessfully installed cfgv-3.4.0 distlib-0.3.7 filelock-3.13.1 identify-2.5.31 nodeenv-1.8.0 platformdirs-3.11.0 pre-commit-3.5.0 virtualenv-20.24.6\n##[group]Run python -m pip freeze --local\n\u001b[36;1mpython -m pip freeze --local\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail 
{0}\n##[endgroup]\nargcomplete==3.1.2\ncfgv==3.4.0\ndistlib==0.3.7\nfilelock==3.13.1\nidentify==2.5.31\nnodeenv==1.8.0\npackaging==23.2\npipx==1.2.1\nplatformdirs==3.11.0\npre-commit==3.5.0\nuserpath==1.9.1\nvirtualenv==20.24.6\n##[group]Run actions/cache@v3\nwith:\n path: ~/.cache/pre-commit\n key: pre-commit-3||05c1fbbb63b353467651b511e6ac241ffd2d8e71749cfabfe5ee6bc8366d2d02\n enableCrossOsArchive: false\n fail-on-cache-miss: false\n lookup-only: false\n##[endgroup]\nCache Size: ~36 MB (37431926 B)\n[command]/usr/bin/tar -xf /home/runner/work/_temp/197df8ad-dba5-43ca-8f9c-fc801b310ed7/cache.tzst -P -C /home/runner/work/qtile/qtile --use-compress-program unzstd\nReceived 37431926 of 37431926 (100.0%), 35.7 MBs/sec\nCache restored successfully\nCache restored from key: pre-commit-3||05c1fbbb63b353467651b511e6ac241ffd2d8e71749cfabfe5ee6bc8366d2d02\n##[group]Run pre-commit run --show-diff-on-failure --color=always --all-files\n\u001b[36;1mpre-commit run --show-diff-on-failure --color=always --all-files\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\n##[endgroup]\nblack....................................................................\u001b[42mPassed\u001b[m\nisort....................................................................\u001b[42mPassed\u001b[m\nflake8...................................................................\u001b[42mPassed\u001b[m\nmypy.....................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: mypy\u001b[m\n\u001b[2m- exit code: 1\u001b[m\n\nlibqtile/hook.py:107: error: Need type annotation for \"hooks\" (hint: \"hooks: Set[<type>] = ...\") [var-annotated]\nlibqtile/hook.py:136: error: Return type \"None\" of \"_subscribe\" incompatible with return type \"Callable[..., Any]\" in supertype \"Subscribe\" [override]\nlibqtile/bar.py:351: error: \"Subscribe\" has no attribute \"setgroup\" [attr-defined]\nlibqtile/bar.py:352: error: \"Subscribe\" has no attribute \"startup_complete\" [attr-defined]\nlibqtile/scratchpad.py:403: error: \"Subscribe\" has no attribute \"client_new\" [attr-defined]\nlibqtile/scratchpad.py:407: error: \"Subscribe\" has no attribute \"client_killed\" [attr-defined]\nlibqtile/scratchpad.py:408: error: \"Subscribe\" has no attribute \"float_change\" [attr-defined]\nlibqtile/layout/screensplit.py:177: error: \"Subscribe\" has no attribute \"focus_change\" [attr-defined]\nlibqtile/layout/screensplit.py:180: error: \"Unsubscribe\" has no attribute \"focus_change\" [attr-defined]\nlibqtile/core/state.py:91: error: \"Subscribe\" has no attribute \"client_new\" [attr-defined]\nlibqtile/core/state.py:110: error: \"Unsubscribe\" has no attribute \"client_new\" [attr-defined]\nlibqtile/core/manager.py:177: error: \"Subscribe\" has no attribute \"setgroup\" [attr-defined]\nlibqtile/core/manager.py:180: error: \"Subscribe\" has no attribute \"screen_change\" [attr-defined]\nlibqtile/widget/prompt.py:412: error: \"Subscribe\" has no attribute \"client_focus\" [attr-defined]\nlibqtile/backend/wayland/core.py:200: error: \"Subscribe\" has no attribute \"startup_complete\" [attr-defined]\nFound 15 errors in 8 files (checked 178 source files)\n\nvulture..................................................................\u001b[42mPassed\u001b[m\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/libqtile/hook.py b/libqtile/hook.py\nindex f405a68d..da33d949 100644\n--- a/libqtile/hook.py\n+++ b/libqtile/hook.py\n@@ -42,6 +42,8 @@ from libqtile.resources.sleep import 
inhibitor\n if TYPE_CHECKING:\n from typing import Callable\n \n+ HookHandler = Callable[[Callable], Callable]\n+\n subscriptions = {} # type: dict\n \n \n@@ -87,7 +89,7 @@ def _user_hook_func(self):\n def f(func):\n name = f\"user_{hook_name}\"\n if name not in self.hooks:\n- self.hooks.add(name)\n+ self.hooks[name] = None\n return self._subscribe(name, func)\n \n return f\n@@ -102,15 +104,28 @@ class Hook:\n self.func = func\n \n \n-class Subscribe:\n+class HookHandlerCollection:\n def __init__(self, registry_name: str, check_name=True):\n- self.hooks = set([])\n+ self.hooks: dict[str, HookHandler] = {}\n if check_name and registry_name in subscriptions:\n raise NameError(\"A hook registry already exists with that name: {registry_name}\")\n elif registry_name not in subscriptions:\n subscriptions[registry_name] = {}\n self.registry_name = registry_name\n \n+ def __getattr__(self, name: str) -> HookHandler:\n+ if name not in self.hooks:\n+ raise AttributeError\n+ return self.hooks[name]\n+\n+ def _register(self, hook: Hook) -> None:\n+ def _hook_func(func):\n+ return self._subscribe(hook.name, func)\n+\n+ self.hooks[hook.name] = _hook_func if hook.func is None else hook.func(self)\n+\n+\n+class Subscribe(HookHandlerCollection):\n def _subscribe(self, event: str, func: Callable) -> Callable:\n registry = subscriptions.setdefault(self.registry_name, dict())\n lst = registry.setdefault(event, [])\n@@ -118,16 +133,8 @@ class Subscribe:\n lst.append(func)\n return func\n \n- def _register(self, hook: Hook) -> None:\n- def _hook_func(func):\n- return self._subscribe(hook.name, func)\n-\n- self.hooks.add(hook.name)\n- setattr(self, hook.name, _hook_func if hook.func is None else hook.func(self))\n- setattr(getattr(self, hook.name), \"__doc__\", hook.doc)\n-\n \n-class Unsubscribe(Subscribe):\n+class Unsubscribe(HookHandlerCollection):\n \"\"\"\n This class mirrors subscribe, except the _subscribe member has been\n overridden to remove calls from hooks.\n@@ -153,7 +160,7 @@ class Registry:\n self.register_hook(hook)\n \n def register_hook(self, hook: Hook) -> None:\n- if hook.name in dir(self.subscribe):\n+ if hook.name in self.subscribe.hooks:\n raise utils.QtileError(\n f\"Unable to register hook. A hook with that name already exists: {hook.name}\"\n )\n", "difficulty": 2, "changed_files": ["libqtile/hook.py"], "commit_link": "https://github.com/qtile/qtile/tree/ac842d4dfb46538e2e59e77a0d52080a153df886"}
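
The qtile fix above is worth pausing on: mypy's `attr-defined` errors arose because `Subscribe._register` attached each hook handler with `setattr`, which the type checker cannot see, and the patch instead stores handlers in a typed dict resolved through `__getattr__`. Below is a minimal sketch of that pattern, with simplified names (`HandlerCollection`, `register`) standing in for qtile's actual `HookHandlerCollection`/`_register` machinery; once attribute access funnels through a typed `__getattr__`, mypy accepts arbitrary hook names such as `client_new`.

    from typing import Callable, Dict

    # A hook handler takes the subscriber function and returns it.
    HookHandler = Callable[[Callable], Callable]

    class HandlerCollection:
        def __init__(self) -> None:
            # One well-typed mapping instead of many setattr'd attributes.
            self.hooks: Dict[str, HookHandler] = {}

        def __getattr__(self, name: str) -> HookHandler:
            # Only invoked when normal attribute lookup fails, so real
            # attributes like self.hooks are unaffected.
            try:
                return self.hooks[name]
            except KeyError:
                raise AttributeError(name) from None

        def register(self, name: str) -> None:
            def handler(func: Callable) -> Callable:
                # A real registry would record the subscription here.
                return func
            self.hooks[name] = handler

    subscribe = HandlerCollection()
    subscribe.register("client_new")
    subscribe.client_new(lambda *args: None)  # resolved via __getattr__, not setattr

The same dict is also what lets the patched `register_hook` detect duplicates with `hook.name in self.subscribe.hooks` instead of probing `dir()`.
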
data/python/af9b76a.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 29, "repo_owner": "lightly-ai", "repo_name": "lightly", "head_branch": "ersi-lig-3910-update-mae-benchmark-code", "workflow_name": "Code Format Check", "workflow_filename": "test_code_format.yml", "workflow_path": ".github/workflows/test_code_format.yml", "contributor": "lightly-ai", "sha_fail": "af9b76a790e0c06f0da674c3fdcd2e4404af1c99", "sha_success": "a0639a7caa3d617fe076547ccd2c3fdfd507aabc", "workflow": "name: Code Format Check\n\non:\n push:\n pull_request:\n workflow_dispatch:\n\njobs:\n test:\n name: Check\n runs-on: ubuntu-latest\n steps:\n - name: Checkout Code\n uses: actions/checkout@v3\n - name: Hack to get setup-python to work on nektos/act\n run: |\n if [ ! -f \"/etc/lsb-release\" ] ; then\n echo \"DISTRIB_RELEASE=18.04\" > /etc/lsb-release\n fi\n - name: Set up Python\n uses: actions/setup-python@v4\n with:\n python-version: \"3.7\"\n - uses: actions/cache@v2\n with:\n path: ${{ env.pythonLocation }}\n key: cache_v2_${{ env.pythonLocation }}-${{ hashFiles('requirements/**') }}\n - name: Install Dependencies and lightly\n run: pip install -e '.[all]'\n - name: Run Format Check\n run: |\n make format-check\n - name: Run Type Check\n run: |\n make type-check\n", "logs": [{"step_name": "Check/7_Run Format Check.txt", "log": "##[group]Run make format-check\n\u001b[36;1mmake format-check\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.7.17/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.7.17/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.7.17/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.7.17/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.7.17/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.7.17/x64/lib\n##[endgroup]\n\u26ab Checking code format...\nisort --check-only --diff .\nERROR: /home/runner/work/lightly/lightly/docs/source/getting_started/benchmarks/imagenette_benchmark.py Imports are incorrectly sorted and/or formatted.\n--- /home/runner/work/lightly/lightly/docs/source/getting_started/benchmarks/imagenette_benchmark.py:before\t2024-01-08 18:14:31.536514\n+++ /home/runner/work/lightly/lightly/docs/source/getting_started/benchmarks/imagenette_benchmark.py:after\t2024-01-08 18:16:08.310119\n@@ -61,8 +61,8 @@\n \"\"\"\n import copy\n import os\n+import sys\n import time\n-import sys\n \n import numpy as np\n import pytorch_lightning as pl\n@@ -94,7 +94,12 @@\n VICRegLoss,\n )\n from lightly.models import modules, utils\n-from lightly.models.modules import heads, masked_autoencoder, masked_autoencoder_timm, memory_bank\n+from lightly.models.modules import (\n+ heads,\n+ masked_autoencoder,\n+ masked_autoencoder_timm,\n+ memory_bank,\n+)\n from lightly.transforms import (\n BYOLTransform,\n BYOLView1Transform,\nSkipped 2 files\nmake: *** [Makefile:45: format-check] Error 1\n##[error]Process completed with exit code 2.\n"}], "diff": "diff --git a/docs/source/getting_started/benchmarks/imagenette_benchmark.py b/docs/source/getting_started/benchmarks/imagenette_benchmark.py\nindex 6dfecb1f..4858e8cf 100644\n--- a/docs/source/getting_started/benchmarks/imagenette_benchmark.py\n+++ b/docs/source/getting_started/benchmarks/imagenette_benchmark.py\n@@ -61,8 +61,8 @@ Results (4.5.2023):\n \"\"\"\n import copy\n import os\n-import time\n import sys\n+import time\n \n import numpy as np\n import pytorch_lightning as pl\n@@ -75,7 +75,9 @@ from pytorch_lightning.loggers import TensorBoardLogger\n try:\n from timm.models import vision_transformer\n except ImportError:\n- 
print(\"TIMM is not available. Please install in order to run this benchmark for MAE.\")\n+ print(\n+ \"TIMM is not available. Please install in order to run this benchmark for MAE.\"\n+ )\n sys.exit(1)\n \n from lightly.data import LightlyDataset\n@@ -94,7 +96,12 @@ from lightly.loss import (\n VICRegLoss,\n )\n from lightly.models import modules, utils\n-from lightly.models.modules import heads, masked_autoencoder, masked_autoencoder_timm, memory_bank\n+from lightly.models.modules import (\n+ heads,\n+ masked_autoencoder,\n+ masked_autoencoder_timm,\n+ memory_bank,\n+)\n from lightly.transforms import (\n BYOLTransform,\n BYOLView1Transform,\n", "difficulty": 0, "changed_files": ["docs/source/getting_started/benchmarks/imagenette_benchmark.py"], "commit_link": "https://github.com/lightly-ai/lightly/tree/af9b76a790e0c06f0da674c3fdcd2e4404af1c99"}
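
The lightly datapoint is a pure formatting failure: isort, run with its black-compatible settings, requires plain imports in alphabetical order and rewraps any `from` import that exceeds the 88-character line length into a parenthesized block. The snippet below, which assumes the `lightly` package is importable (the names come from the datapoint's own diff), shows the two corrections in their final form.

    import sys  # plain imports sort alphabetically: "sys" before "time"
    import time

    # Long "from" imports are wrapped one name per line, trailing comma
    # included, to stay within black's 88-column limit.
    from lightly.models.modules import (
        heads,
        masked_autoencoder,
        masked_autoencoder_timm,
        memory_bank,
    )

Running `isort --profile black .` followed by `black .` locally typically reproduces exactly the layout this CI check expects, though the repo may pin its own configuration in `pyproject.toml`.
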
data/python/b15d4bd.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 8, "repo_owner": "scrapy", "repo_name": "scrapy", "head_branch": "simplify-attempt2", "workflow_name": "Checks", "workflow_filename": "checks.yml", "workflow_path": ".github/workflows/checks.yml", "contributor": "monicaq21", "sha_fail": "b15d4bd9177149b88d1b0f719e7e6290df81fe9a", "sha_success": "f629f8db5fa0bb8f46fbf64264c1a2d0dec55cfc", "workflow": "name: Checks\non: [push, pull_request]\n\nconcurrency:\n group: ${{github.workflow}}-${{ github.ref }}\n cancel-in-progress: true\n\njobs:\n checks:\n runs-on: ubuntu-latest\n strategy:\n fail-fast: false\n matrix:\n include:\n - python-version: \"3.12\"\n env:\n TOXENV: pylint\n - python-version: 3.8\n env:\n TOXENV: typing\n - python-version: \"3.11\" # Keep in sync with .readthedocs.yml\n env:\n TOXENV: docs\n - python-version: \"3.12\"\n env:\n TOXENV: twinecheck\n\n steps:\n - uses: actions/checkout@v4\n\n - name: Set up Python ${{ matrix.python-version }}\n uses: actions/setup-python@v4\n with:\n python-version: ${{ matrix.python-version }}\n\n - name: Run check\n env: ${{ matrix.env }}\n run: |\n pip install -U tox\n tox\n\n pre-commit:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - uses: pre-commit/[email protected]\n", "logs": [{"step_name": "pre-commit/3_Run [email protected]", "log": "##[group]Run pre-commit/[email protected]\nwith:\n extra_args: --all-files\n##[endgroup]\n##[group]Run python -m pip install pre-commit\n\u001b[36;1mpython -m pip install pre-commit\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\n##[endgroup]\nDefaulting to user installation because normal site-packages is not writeable\nCollecting pre-commit\n Downloading pre_commit-3.5.0-py2.py3-none-any.whl (203 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 203.7/203.7 KB 8.3 MB/s eta 0:00:00\nCollecting nodeenv>=0.11.1\n Downloading nodeenv-1.8.0-py2.py3-none-any.whl (22 kB)\nRequirement already satisfied: pyyaml>=5.1 in /usr/lib/python3/dist-packages (from pre-commit) (5.4.1)\nCollecting identify>=1.0.0\n Downloading identify-2.5.32-py2.py3-none-any.whl (98 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 98.9/98.9 KB 12.6 MB/s eta 0:00:00\nCollecting cfgv>=2.0.0\n Downloading cfgv-3.4.0-py2.py3-none-any.whl (7.2 kB)\nCollecting virtualenv>=20.10.0\n Downloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 27.4 MB/s eta 0:00:00\nRequirement already satisfied: setuptools in /usr/lib/python3/dist-packages (from nodeenv>=0.11.1->pre-commit) (59.6.0)\nCollecting platformdirs<5,>=3.9.1\n Downloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nCollecting filelock<4,>=3.12.2\n Downloading filelock-3.13.1-py3-none-any.whl (11 kB)\nCollecting distlib<1,>=0.3.7\n Downloading distlib-0.3.7-py2.py3-none-any.whl (468 kB)\n 
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 KB 43.3 MB/s eta 0:00:00\nInstalling collected packages: distlib, platformdirs, nodeenv, identify, filelock, cfgv, virtualenv, pre-commit\nSuccessfully installed cfgv-3.4.0 distlib-0.3.7 filelock-3.13.1 identify-2.5.32 nodeenv-1.8.0 platformdirs-4.1.0 pre-commit-3.5.0 virtualenv-20.25.0\n##[group]Run python -m pip freeze --local\n\u001b[36;1mpython -m pip freeze --local\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\n##[endgroup]\nargcomplete==3.1.6\ncfgv==3.4.0\ndistlib==0.3.7\nfilelock==3.13.1\nidentify==2.5.32\nnodeenv==1.8.0\npackaging==23.2\npipx==1.2.1\nplatformdirs==4.1.0\npre-commit==3.5.0\nuserpath==1.9.1\nvirtualenv==20.25.0\n##[group]Run actions/cache@v3\nwith:\n path: ~/.cache/pre-commit\n key: pre-commit-3||7a8fe885594aed9a90fd5938b4bb49b65732538a44c08aad3d6ea69d9d0cf64c\n enableCrossOsArchive: false\n fail-on-cache-miss: false\n lookup-only: false\n##[endgroup]\nCache Size: ~33 MB (34938818 B)\n[command]/usr/bin/tar -xf /home/runner/work/_temp/49db49b7-4865-4c29-942c-cd4505f101c6/cache.tzst -P -C /home/runner/work/scrapy/scrapy --use-compress-program unzstd\nCache restored successfully\nCache restored from key: pre-commit-3||7a8fe885594aed9a90fd5938b4bb49b65732538a44c08aad3d6ea69d9d0cf64c\n##[group]Run pre-commit run --show-diff-on-failure --color=always --all-files\n\u001b[36;1mpre-commit run --show-diff-on-failure --color=always --all-files\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\n##[endgroup]\nbandit...................................................................\u001b[42mPassed\u001b[m\nflake8...................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: flake8\u001b[m\n\u001b[2m- exit code: 1\u001b[m\n\n\u001b[1mscrapy/core/downloader/contextfactory.py\u001b[m\u001b[36m:\u001b[m182\u001b[36m:\u001b[m42\u001b[36m:\u001b[m \u001b[1m\u001b[31mE251\u001b[m unexpected spaces around keyword / parameter equals\n\u001b[1mscrapy/core/downloader/contextfactory.py\u001b[m\u001b[36m:\u001b[m182\u001b[36m:\u001b[m44\u001b[36m:\u001b[m \u001b[1m\u001b[31mE251\u001b[m unexpected spaces around keyword / parameter equals\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m51\u001b[36m:\u001b[m17\u001b[36m:\u001b[m \u001b[1m\u001b[31mW291\u001b[m trailing whitespace\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m55\u001b[36m:\u001b[m31\u001b[36m:\u001b[m \u001b[1m\u001b[31mW291\u001b[m trailing whitespace\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m57\u001b[36m:\u001b[m12\u001b[36m:\u001b[m \u001b[1m\u001b[31mW291\u001b[m trailing whitespace\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m63\u001b[36m:\u001b[m1\u001b[36m:\u001b[m \u001b[1m\u001b[31mE302\u001b[m expected 2 blank lines, found 1\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m180\u001b[36m:\u001b[m24\u001b[36m:\u001b[m \u001b[1m\u001b[31mW291\u001b[m trailing whitespace\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m191\u001b[36m:\u001b[m1\u001b[36m:\u001b[m \u001b[1m\u001b[31mW293\u001b[m blank line contains whitespace\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m193\u001b[36m:\u001b[m1\u001b[36m:\u001b[m \u001b[1m\u001b[31mE265\u001b[m block comment should start with '# 
'\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m198\u001b[36m:\u001b[m25\u001b[36m:\u001b[m \u001b[1m\u001b[31mW291\u001b[m trailing whitespace\n\u001b[1mscrapy/utils/misc.py\u001b[m\u001b[36m:\u001b[m213\u001b[36m:\u001b[m1\u001b[36m:\u001b[m \u001b[1m\u001b[31mE303\u001b[m too many blank lines (4)\n\u001b[1mtests/test_addons.py\u001b[m\u001b[36m:\u001b[m112\u001b[36m:\u001b[m41\u001b[36m:\u001b[m \u001b[1m\u001b[31mE999\u001b[m IndentationError: unindent does not match any outer indentation level\n\u001b[1mtests/test_utils_misc/__init__.py\u001b[m\u001b[36m:\u001b[m98\u001b[36m:\u001b[m5\u001b[36m:\u001b[m \u001b[1m\u001b[31mE301\u001b[m expected 1 blank line, found 0\n\u001b[1mtests/test_utils_misc/__init__.py\u001b[m\u001b[36m:\u001b[m125\u001b[36m:\u001b[m35\u001b[36m:\u001b[m \u001b[1m\u001b[31mF821\u001b[m undefined name 'settings'\n\u001b[1mtests/test_utils_misc/__init__.py\u001b[m\u001b[36m:\u001b[m133\u001b[36m:\u001b[m1\u001b[36m:\u001b[m \u001b[1m\u001b[31mW293\u001b[m blank line contains whitespace\n\nblack....................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: black\u001b[m\n\u001b[2m- exit code: 123\u001b[m\n\u001b[2m- files were modified by this hook\u001b[m\n\n\u001b[1mreformatted scrapy/addons.py\u001b[0m\n\u001b[1mreformatted scrapy/core/downloader/handlers/__init__.py\u001b[0m\n\u001b[1mreformatted scrapy/core/downloader/contextfactory.py\u001b[0m\n\u001b[1mreformatted scrapy/core/engine.py\u001b[0m\n\u001b[1mreformatted scrapy/crawler.py\u001b[0m\n\u001b[1mreformatted scrapy/utils/misc.py\u001b[0m\n\u001b[31merror: cannot format tests/test_addons.py: unindent does not match any outer indentation level (<tokenize>, line 112)\u001b[0m\n\u001b[1mreformatted tests/test_downloader_handlers.py\u001b[0m\n\u001b[1mreformatted tests/test_utils_misc/__init__.py\u001b[0m\n\n\u001b[1mOh no! 
\ud83d\udca5 \ud83d\udc94 \ud83d\udca5\u001b[0m\n\u001b[34m\u001b[1m8 files \u001b[0m\u001b[1mreformatted\u001b[0m, \u001b[34m332 files \u001b[0mleft unchanged, \u001b[31m1 file failed to reformat\u001b[0m.\n\nisort....................................................................\u001b[41mFailed\u001b[m\n\u001b[2m- hook id: isort\u001b[m\n\u001b[2m- files were modified by this hook\u001b[m\n\nFixing /home/runner/work/scrapy/scrapy/scrapy/core/downloader/handlers/http10.py\nFixing /home/runner/work/scrapy/scrapy/scrapy/extensions/feedexport.py\n\nblacken-docs.............................................................\u001b[42mPassed\u001b[m\npre-commit hook(s) made changes.\nIf you are seeing this message in CI, reproduce locally with: `pre-commit run --all-files`.\nTo run `pre-commit` as part of git workflow, use `pre-commit install`.\nAll changes made by hooks:\n\u001b[1mdiff --git a/scrapy/addons.py b/scrapy/addons.py\u001b[m\n\u001b[1mindex a6efba3..cb1574b 100644\u001b[m\n\u001b[1m--- a/scrapy/addons.py\u001b[m\n\u001b[1m+++ b/scrapy/addons.py\u001b[m\n\u001b[36m@@ -33,9 +33,7 @@\u001b[m \u001b[mclass AddonManager:\u001b[m\n try:\u001b[m\n addoncls = load_object(clspath)\u001b[m\n # changes create_instance call to build_from_settings\u001b[m\n\u001b[31m- addon = build_from_settings(\u001b[m\n\u001b[31m- addoncls, settings=settings\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m addon = build_from_settings(addoncls, settings=settings)\u001b[m\n addon.update_settings(settings)\u001b[m\n self.addons.append(addon)\u001b[m\n except NotConfigured as e:\u001b[m\n\u001b[1mdiff --git a/scrapy/core/downloader/contextfactory.py b/scrapy/core/downloader/contextfactory.py\u001b[m\n\u001b[1mindex 6d3ddd4..73a3d32 100644\u001b[m\n\u001b[1m--- a/scrapy/core/downloader/contextfactory.py\u001b[m\n\u001b[1m+++ b/scrapy/core/downloader/contextfactory.py\u001b[m\n\u001b[36m@@ -178,9 +178,7 @@\u001b[m \u001b[mdef load_context_factory_from_settings(settings, crawler):\u001b[m\n except TypeError:\u001b[m\n # use context factory defaults\u001b[m\n # changes create_instance call to build_from_settings\u001b[m\n\u001b[31m- context_factory = build_from_settings(\u001b[m\n\u001b[31m- context_factory_cls, settings = settings\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m context_factory = build_from_settings(context_factory_cls, settings=settings)\u001b[m\n # context_factory = create_instance(\u001b[m\n # objcls=context_factory_cls,\u001b[m\n # settings=settings,\u001b[m\n\u001b[1mdiff --git a/scrapy/core/downloader/handlers/__init__.py b/scrapy/core/downloader/handlers/__init__.py\u001b[m\n\u001b[1mindex d5b8ce8..fc597b0 100644\u001b[m\n\u001b[1m--- a/scrapy/core/downloader/handlers/__init__.py\u001b[m\n\u001b[1m+++ b/scrapy/core/downloader/handlers/__init__.py\u001b[m\n\u001b[36m@@ -56,9 +56,7 @@\u001b[m \u001b[mclass DownloadHandlers:\u001b[m\n if skip_lazy and getattr(dhcls, \"lazy\", True):\u001b[m\n return None\u001b[m\n # change create_instance call to build_from_settings\u001b[m\n\u001b[31m- dh = build_from_settings(\u001b[m\n\u001b[31m- dhcls, settings=self._crawler.settings\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m dh = build_from_settings(dhcls, settings=self._crawler.settings)\u001b[m\n # dh = create_instance(\u001b[m\n # objcls=dhcls,\u001b[m\n # settings=self._crawler.settings,\u001b[m\n\u001b[1mdiff --git a/scrapy/core/downloader/handlers/http10.py b/scrapy/core/downloader/handlers/http10.py\u001b[m\n\u001b[1mindex 76faf7d..5c01ba4 
100644\u001b[m\n\u001b[1m--- a/scrapy/core/downloader/handlers/http10.py\u001b[m\n\u001b[1m+++ b/scrapy/core/downloader/handlers/http10.py\u001b[m\n\u001b[36m@@ -1,6 +1,6 @@\u001b[m\n \"\"\"Download handlers for http and https schemes\u001b[m\n \"\"\"\u001b[m\n\u001b[31m-from scrapy.utils.misc import build_from_settings, build_from_crawler, load_object\u001b[m\n\u001b[32m+\u001b[m\u001b[32mfrom scrapy.utils.misc import build_from_crawler, build_from_settings, load_object\u001b[m\n from scrapy.utils.python import to_unicode\u001b[m\n \u001b[m\n \u001b[m\n\u001b[1mdiff --git a/scrapy/core/engine.py b/scrapy/core/engine.py\u001b[m\n\u001b[1mindex 81aacdf..281dc8a 100644\u001b[m\n\u001b[1m--- a/scrapy/core/engine.py\u001b[m\n\u001b[1m+++ b/scrapy/core/engine.py\u001b[m\n\u001b[36m@@ -358,9 +358,7 @@\u001b[m \u001b[mclass ExecutionEngine:\u001b[m\n raise RuntimeError(f\"No free spider slot when opening {spider.name!r}\")\u001b[m\n logger.info(\"Spider opened\", extra={\"spider\": spider})\u001b[m\n nextcall = CallLaterOnce(self._next_request)\u001b[m\n\u001b[31m- scheduler = build_from_crawler(\u001b[m\n\u001b[31m- self.scheduler_cls, crawler=self.crawler\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m scheduler = build_from_crawler(self.scheduler_cls, crawler=self.crawler)\u001b[m\n start_requests = yield self.scraper.spidermw.process_start_requests(\u001b[m\n start_requests, spider\u001b[m\n )\u001b[m\n\u001b[1mdiff --git a/scrapy/crawler.py b/scrapy/crawler.py\u001b[m\n\u001b[1mindex a1f699b..1280953 100644\u001b[m\n\u001b[1m--- a/scrapy/crawler.py\u001b[m\n\u001b[1m+++ b/scrapy/crawler.py\u001b[m\n\u001b[36m@@ -111,7 +111,8 @@\u001b[m \u001b[mclass Crawler:\u001b[m\n \u001b[m\n # changes create_instance call to build_from_settings\u001b[m\n self.request_fingerprinter = build_from_settings(\u001b[m\n\u001b[31m- load_object(self.settings[\"REQUEST_FINGERPRINTER_CLASS\"]), settings=self.settings\u001b[m\n\u001b[32m+\u001b[m\u001b[32m load_object(self.settings[\"REQUEST_FINGERPRINTER_CLASS\"]),\u001b[m\n\u001b[32m+\u001b[m\u001b[32m settings=self.settings,\u001b[m\n )\u001b[m\n \u001b[m\n # self.request_fingerprinter = create_instance(\u001b[m\n\u001b[1mdiff --git a/scrapy/extensions/feedexport.py b/scrapy/extensions/feedexport.py\u001b[m\n\u001b[1mindex a4c8a47..62fb07e 100644\u001b[m\n\u001b[1m--- a/scrapy/extensions/feedexport.py\u001b[m\n\u001b[1m+++ b/scrapy/extensions/feedexport.py\u001b[m\n\u001b[36m@@ -28,7 +28,7 @@\u001b[m \u001b[mfrom scrapy.utils.defer import maybe_deferred_to_future\u001b[m\n from scrapy.utils.deprecate import create_deprecated_class\u001b[m\n from scrapy.utils.ftp import ftp_store_file\u001b[m\n from scrapy.utils.log import failure_to_exc_info\u001b[m\n\u001b[31m-from scrapy.utils.misc import build_from_settings, build_from_crawler, load_object\u001b[m\n\u001b[32m+\u001b[m\u001b[32mfrom scrapy.utils.misc import build_from_crawler, build_from_settings, load_object\u001b[m\n from scrapy.utils.python import without_none_values\u001b[m\n \u001b[m\n logger = logging.getLogger(__name__)\u001b[m\n\u001b[1mdiff --git a/scrapy/utils/misc.py b/scrapy/utils/misc.py\u001b[m\n\u001b[1mindex 613328a..63270e9 100644\u001b[m\n\u001b[1m--- a/scrapy/utils/misc.py\u001b[m\n\u001b[1m+++ b/scrapy/utils/misc.py\u001b[m\n\u001b[36m@@ -47,19 +47,21 @@\u001b[m \u001b[mdef arg_to_iter(arg: Any) -> Iterable[Any]:\u001b[m\n return cast(Iterable[Any], arg)\u001b[m\n return [arg]\u001b[m\n \u001b[m\n\u001b[32m+\u001b[m\n # def build_from_crawler(objcls, crawler, none, *args, 
**kwargs):\u001b[m\n\u001b[31m-# if crawler \u001b[m\n\u001b[32m+\u001b[m\u001b[32m# if crawler\u001b[m\n # if hasattr(objcls, \"from_crawler\"):\u001b[m\n # instance = objcls.from_crawler(crawler, *args, **kwargs)\u001b[m\n # method_name = \"from_crawler\"\u001b[m\n\u001b[31m-# if instance is None: \u001b[m\n\u001b[32m+\u001b[m\u001b[32m# if instance is None:\u001b[m\n # raise TypeError(f\"{objcls.__qualname__}.{method_name} returned None)\u001b[m\n\u001b[31m-# else: \u001b[m\n\u001b[32m+\u001b[m\u001b[32m# else:\u001b[m\n # instance = objcls(*args, **kwargs)\u001b[m\n # method_name = \"__new__\"\u001b[m\n \u001b[m\n # return instance\u001b[m\n \u001b[m\n\u001b[32m+\u001b[m\n def load_object(path: Union[str, Callable]) -> Any:\u001b[m\n \"\"\"Load an object given its absolute object path, and return it.\u001b[m\n \u001b[m\n\u001b[36m@@ -177,7 +179,7 @@\u001b[m \u001b[mdef rel_has_nofollow(rel: Optional[str]) -> bool:\u001b[m\n # Raises typeError is instance is None\u001b[m\n # Creates a class instance using 'from_crawler' constructor\u001b[m\n def build_from_crawler(objcls, crawler, /, *args, **kwargs):\u001b[m\n\u001b[31m- if crawler is None: \u001b[m\n\u001b[32m+\u001b[m\u001b[32m if crawler is None:\u001b[m\n raise ValueError(\"Specify crawler.\")\u001b[m\n if crawler and hasattr(objcls, \"from_crawler\"):\u001b[m\n instance = objcls.from_crawler(crawler, *args, **kwargs)\u001b[m\n\u001b[36m@@ -188,14 +190,14 @@\u001b[m \u001b[mdef build_from_crawler(objcls, crawler, /, *args, **kwargs):\u001b[m\n if instance is None:\u001b[m\n raise TypeError(f\"{objcls.__qualname__}.{method_name} returned None\")\u001b[m\n return instance\u001b[m\n\u001b[31m- \u001b[m\n \u001b[m\n\u001b[31m-#``*args`` and ``**kwargs`` are forwarded to the constructors.\u001b[m\n\u001b[32m+\u001b[m\n\u001b[32m+\u001b[m\u001b[32m# ``*args`` and ``**kwargs`` are forwarded to the constructors.\u001b[m\n # Raises ``ValueError`` if``settings`` is``None``.\u001b[m\n # Raises typeError is instance is None\u001b[m\n # Creates a class instance using 'from_settings' constructor\u001b[m\n def build_from_settings(objcls, settings, /, *args, **kwargs):\u001b[m\n\u001b[31m- if settings is None: \u001b[m\n\u001b[32m+\u001b[m\u001b[32m if settings is None:\u001b[m\n raise ValueError(\"Specify settings.\")\u001b[m\n if settings and hasattr(objcls, \"from_settings\"):\u001b[m\n instance = objcls.from_settings(settings, *args, **kwargs)\u001b[m\n\u001b[36m@@ -208,8 +210,6 @@\u001b[m \u001b[mdef build_from_settings(objcls, settings, /, *args, **kwargs):\u001b[m\n return instance\u001b[m\n \u001b[m\n \u001b[m\n\u001b[31m-\u001b[m\n\u001b[31m-\u001b[m\n @contextmanager\u001b[m\n def set_environ(**kwargs: str) -> Generator[None, Any, None]:\u001b[m\n \"\"\"Temporarily set environment variables inside the context manager and\u001b[m\n\u001b[1mdiff --git a/tests/test_downloader_handlers.py b/tests/test_downloader_handlers.py\u001b[m\n\u001b[1mindex 8595e68..37a5364 100644\u001b[m\n\u001b[1m--- a/tests/test_downloader_handlers.py\u001b[m\n\u001b[1m+++ b/tests/test_downloader_handlers.py\u001b[m\n\u001b[36m@@ -669,9 +669,7 @@\u001b[m \u001b[mclass Https11CustomCiphers(unittest.TestCase):\u001b[m\n crawler = get_crawler(\u001b[m\n settings_dict={\"DOWNLOADER_CLIENT_TLS_CIPHERS\": \"CAMELLIA256-SHA\"}\u001b[m\n )\u001b[m\n\u001b[31m- self.download_handler = build_from_crawler(\u001b[m\n\u001b[31m- self.download_handler_cls, crawler\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m self.download_handler = 
build_from_crawler(self.download_handler_cls, crawler)\u001b[m\n self.download_request = self.download_handler.download_request\u001b[m\n \u001b[m\n @defer.inlineCallbacks\u001b[m\n\u001b[36m@@ -1036,9 +1034,7 @@\u001b[m \u001b[mclass BaseFTPTestCase(unittest.TestCase):\u001b[m\n self.port = reactor.listenTCP(0, self.factory, interface=\"127.0.0.1\")\u001b[m\n self.portNum = self.port.getHost().port\u001b[m\n crawler = get_crawler()\u001b[m\n\u001b[31m- self.download_handler = build_from_crawler(\u001b[m\n\u001b[31m- FTPDownloadHandler, crawler\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m self.download_handler = build_from_crawler(FTPDownloadHandler, crawler)\u001b[m\n self.addCleanup(self.port.stopListening)\u001b[m\n \u001b[m\n def tearDown(self):\u001b[m\n\u001b[36m@@ -1182,9 +1178,7 @@\u001b[m \u001b[mclass AnonymousFTPTestCase(BaseFTPTestCase):\u001b[m\n self.port = reactor.listenTCP(0, self.factory, interface=\"127.0.0.1\")\u001b[m\n self.portNum = self.port.getHost().port\u001b[m\n crawler = get_crawler()\u001b[m\n\u001b[31m- self.download_handler = build_from_crawler(\u001b[m\n\u001b[31m- FTPDownloadHandler, crawler\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m self.download_handler = build_from_crawler(FTPDownloadHandler, crawler)\u001b[m\n self.addCleanup(self.port.stopListening)\u001b[m\n \u001b[m\n def tearDown(self):\u001b[m\n\u001b[36m@@ -1194,9 +1188,7 @@\u001b[m \u001b[mclass AnonymousFTPTestCase(BaseFTPTestCase):\u001b[m\n class DataURITestCase(unittest.TestCase):\u001b[m\n def setUp(self):\u001b[m\n crawler = get_crawler()\u001b[m\n\u001b[31m- self.download_handler = build_from_crawler(\u001b[m\n\u001b[31m- DataURIDownloadHandler, crawler\u001b[m\n\u001b[31m- )\u001b[m\n\u001b[32m+\u001b[m\u001b[32m self.download_handler = build_from_crawler(DataURIDownloadHandler, crawler)\u001b[m\n self.download_request = self.download_handler.download_request\u001b[m\n self.spider = Spider(\"foo\")\u001b[m\n \u001b[m\n\u001b[1mdiff --git a/tests/test_utils_misc/__init__.py b/tests/test_utils_misc/__init__.py\u001b[m\n\u001b[1mindex ccf8022..115ee7e 100644\u001b[m\n\u001b[1m--- a/tests/test_utils_misc/__init__.py\u001b[m\n\u001b[1m+++ b/tests/test_utils_misc/__init__.py\u001b[m\n\u001b[36m@@ -95,6 +95,7 @@\u001b[m \u001b[mclass UtilsMiscTestCase(unittest.TestCase):\u001b[m\n self.assertEqual(\u001b[m\n list(arg_to_iter(TestItem(name=\"john\"))), [TestItem(name=\"john\")]\u001b[m\n )\u001b[m\n\u001b[32m+\u001b[m\n def test_build_from_crawler(self):\u001b[m\n crawler = mock.MagicMock(spec_set=[\"settings\"])\u001b[m\n args = (True, 100.0)\u001b[m\n\u001b[36m@@ -130,7 +131,7 @@\u001b[m \u001b[mclass UtilsMiscTestCase(unittest.TestCase):\u001b[m\n m.from_crawler.return_value = None\u001b[m\n with self.assertRaises(TypeError):\u001b[m\n build_from_crawler(m, crawler, *args, **kwargs)\u001b[m\n\u001b[31m- \u001b[m\n\u001b[32m+\u001b[m\n def test_build_from_settings(self):\u001b[m\n settings = mock.MagicMock()\u001b[m\n args = (True, 100.0)\u001b[m\n##[error]Process completed with exit code 1.\n"}, {"step_name": "checks (3.12, pylint)/4_Run check.txt", "log": "##[group]Run pip install -U tox\n\u001b[36;1mpip install -U tox\u001b[0m\n\u001b[36;1mtox\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.12.0/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.12.0/x64\n Python3_ROOT_DIR: 
/opt/hostedtoolcache/Python/3.12.0/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.12.0/x64/lib\n TOXENV: pylint\n##[endgroup]\nCollecting tox\n Downloading tox-4.11.4-py3-none-any.whl.metadata (5.0 kB)\nCollecting cachetools>=5.3.1 (from tox)\n Downloading cachetools-5.3.2-py3-none-any.whl.metadata (5.2 kB)\nCollecting chardet>=5.2 (from tox)\n Downloading chardet-5.2.0-py3-none-any.whl.metadata (3.4 kB)\nCollecting colorama>=0.4.6 (from tox)\n Downloading colorama-0.4.6-py2.py3-none-any.whl (25 kB)\nCollecting filelock>=3.12.3 (from tox)\n Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting packaging>=23.1 (from tox)\n Downloading packaging-23.2-py3-none-any.whl.metadata (3.2 kB)\nCollecting platformdirs>=3.10 (from tox)\n Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nCollecting pluggy>=1.3 (from tox)\n Downloading pluggy-1.3.0-py3-none-any.whl.metadata (4.3 kB)\nCollecting pyproject-api>=1.6.1 (from tox)\n Downloading pyproject_api-1.6.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting virtualenv>=20.24.3 (from tox)\n Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.24.3->tox)\n Downloading distlib-0.3.7-py2.py3-none-any.whl.metadata (5.1 kB)\nDownloading tox-4.11.4-py3-none-any.whl (153 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 154.0/154.0 kB 24.9 MB/s eta 0:00:00\nDownloading cachetools-5.3.2-py3-none-any.whl (9.3 kB)\nDownloading chardet-5.2.0-py3-none-any.whl (199 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 199.4/199.4 kB 43.0 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading packaging-23.2-py3-none-any.whl (53 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 53.0/53.0 kB 15.5 MB/s eta 0:00:00\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nDownloading pluggy-1.3.0-py3-none-any.whl (18 kB)\nDownloading pyproject_api-1.6.1-py3-none-any.whl (12 kB)\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 86.7 MB/s eta 0:00:00\nDownloading distlib-0.3.7-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 75.2 MB/s eta 0:00:00\nInstalling collected packages: distlib, pluggy, platformdirs, packaging, filelock, colorama, chardet, cachetools, virtualenv, pyproject-api, tox\nSuccessfully installed cachetools-5.3.2 chardet-5.2.0 colorama-0.4.6 distlib-0.3.7 filelock-3.13.1 packaging-23.2 platformdirs-4.1.0 pluggy-1.3.0 pyproject-api-1.6.1 tox-4.11.4 
virtualenv-20.25.0\n\n[notice] A new release of pip is available: 23.2.1 -> 23.3.1\n[notice] To update, run: pip install --upgrade pip\npylint: install_deps> python -I -m pip install -ctests/upper-constraints.txt boto3 google-cloud-storage 'markupsafe<2.1.0' 'mitmproxy<8,>=4.0.4; python_version < \"3.9\" and implementation_name != \"pypy\"' Pillow pylint==3.0.1 robotexclusionrulesparser 'Twisted[http2]' -r tests/requirements.txt\n.pkg: install_requires> python -I -m pip install 'setuptools>=40.8.0' wheel\n.pkg: _optional_hooks> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_sdist> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_wheel> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: install_requires_for_build_wheel> python -I -m pip install wheel\n.pkg: freeze> python -m pip freeze --all\n.pkg: pip==23.3.1,setuptools==69.0.2,wheel==0.42.0\n.pkg: prepare_metadata_for_build_wheel> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: build_sdist> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\npylint: install_package_deps> python -I -m pip install -ctests/upper-constraints.txt 'PyDispatcher>=2.0.5; platform_python_implementation == \"CPython\"' 'PyPyDispatcher>=2.1.0; platform_python_implementation == \"PyPy\"' 'Twisted>=18.9.0' 'cryptography>=36.0.0' 'cssselect>=0.9.1' 'itemadapter>=0.1.0' 'itemloaders>=1.0.1' 'lxml>=4.4.1' packaging 'parsel>=1.5.0' 'protego>=0.1.15' 'pyOpenSSL>=21.0.0' 'queuelib>=1.4.2' 'service-identity>=18.1.0' setuptools tldextract 'w3lib>=1.17.0' 'zope.interface>=5.1.0'\npylint: install_package> python -I -m pip install -ctests/upper-constraints.txt --force-reinstall --no-deps /home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz\npylint: freeze> python -m pip freeze --all\npylint: 
astroid==3.0.1,asttokens==2.4.1,attrs==23.1.0,Automat==22.10.0,blessed==1.20.0,boto3==1.33.7,botocore==1.33.7,bpython==0.24,Brotli==1.1.0,cachetools==5.3.2,certifi==2023.11.17,cffi==1.16.0,charset-normalizer==3.3.2,constantly==23.10.4,coverage==7.3.2,cryptography==41.0.7,cssselect==1.2.0,curtsies==0.4.2,cwcwidth==0.1.9,decorator==5.1.1,dill==0.3.7,execnet==2.0.2,executing==2.0.1,filelock==3.13.1,google-api-core==2.14.0,google-auth==2.24.0,google-cloud-core==2.3.3,google-cloud-storage==2.13.0,google-crc32c==1.5.0,google-resumable-media==2.6.0,googleapis-common-protos==1.61.0,greenlet==3.0.1,h2==4.1.0,hpack==4.0.0,hyperframe==6.0.1,hyperlink==21.0.0,idna==3.6,incremental==22.10.0,iniconfig==2.0.0,ipython==8.18.1,isort==5.12.0,itemadapter==0.8.0,itemloaders==1.1.0,jedi==0.19.1,jmespath==1.0.1,lxml==4.9.3,MarkupSafe==2.0.1,matplotlib-inline==0.1.6,mccabe==0.7.0,packaging==23.2,parsel==1.8.1,parso==0.8.3,pexpect==4.9.0,Pillow==10.1.0,pip==23.3.1,platformdirs==4.1.0,pluggy==1.3.0,priority==1.3.0,prompt-toolkit==3.0.41,Protego==0.3.0,protobuf==4.25.1,ptyprocess==0.7.0,pure-eval==0.2.2,pyasn1==0.5.1,pyasn1-modules==0.3.0,pycparser==2.21,PyDispatcher==2.0.7,pyftpdlib==1.5.9,Pygments==2.17.2,pylint==3.0.1,pyOpenSSL==23.3.0,pytest==7.4.3,pytest-cov==4.0.0,pytest-xdist==3.5.0,python-dateutil==2.8.2,pyxdg==0.28,queuelib==1.6.2,requests==2.31.0,requests-file==1.5.1,robotexclusionrulesparser==1.7.1,rsa==4.9,s3transfer==0.8.2,Scrapy @ file:///home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz#sha256=95dfd13c5691d88425ffee44c03ee9a12098077f4caac9cff2b4fc70ff9404ec,service-identity==23.1.0,setuptools==69.0.2,six==1.16.0,stack-data==0.6.3,sybil==6.0.2,testfixtures==7.2.2,tldextract==5.1.1,tomlkit==0.12.3,traitlets==5.14.0,Twisted==23.10.0,typing_extensions==4.8.0,urllib3==2.0.7,uvloop==0.19.0,w3lib==2.1.2,wcwidth==0.2.12,zope.interface==6.1,zstandard==0.22.0\npylint: commands[0]> pylint conftest.py docs extras scrapy setup.py tests\n<unknown>:230: SyntaxWarning: invalid escape sequence '\\d'\n************* Module tests.test_addons\ntests/test_addons.py:112:40: E0001: Parsing failed: 'unindent does not match any outer indentation level (<unknown>, line 112)' (syntax-error)\n************* Module scrapy.addons\nscrapy/addons.py:36:24: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.crawler\nscrapy/crawler.py:113:37: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.engine\nscrapy/core/engine.py:361:20: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.scheduler\nscrapy/core/scheduler.py:325:15: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/scheduler.py:336:12: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.downloader.contextfactory\nscrapy/core/downloader/contextfactory.py:169:26: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists 
(kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/contextfactory.py:181:26: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.downloader.handlers\nscrapy/core/downloader/handlers/__init__.py:59:17: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.downloader.handlers.http10\nscrapy/core/downloader/handlers/http10.py:34:41: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/http10.py:34:41: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/http10.py:39:41: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/http10.py:39:41: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.core.downloader.handlers.s3\nscrapy/core/downloader/handlers/s3.py:54:28: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/s3.py:54:28: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/s3.py:59:28: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\nscrapy/core/downloader/handlers/s3.py:59:28: W1117: 'settings' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module scrapy.utils.misc\nscrapy/utils/misc.py:51:16: C0303: Trailing whitespace (trailing-whitespace)\nscrapy/utils/misc.py:55:30: C0303: Trailing whitespace (trailing-whitespace)\nscrapy/utils/misc.py:57:11: C0303: Trailing whitespace (trailing-whitespace)\nscrapy/utils/misc.py:180:23: C0303: Trailing whitespace (trailing-whitespace)\nscrapy/utils/misc.py:191:0: C0303: Trailing whitespace (trailing-whitespace)\nscrapy/utils/misc.py:198:24: C0303: Trailing whitespace (trailing-whitespace)\n************* Module tests.test_downloader_handlers\ntests/test_downloader_handlers.py:833:22: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:833:22: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:863:17: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:863:17: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists 
(kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:890:12: W1117: 'objcls' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\ntests/test_downloader_handlers.py:890:12: W1117: 'crawler' will be included in '**kwargs' since a positional-only parameter with this name already exists (kwarg-superseded-by-positional-arg)\n************* Module tests.test_utils_misc.__init__\ntests/test_utils_misc/__init__.py:133:0: C0303: Trailing whitespace (trailing-whitespace)\n\n-----------------------------------\nYour code has been rated at 9.99/10\n\npylint: exit 22 (57.23 seconds) /home/runner/work/scrapy/scrapy> pylint conftest.py docs extras scrapy setup.py tests pid=2046\n.pkg: _exit> python /opt/hostedtoolcache/Python/3.12.0/x64/lib/python3.12/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n pylint: FAIL code 22 (101.48=setup[44.25]+cmd[57.23] seconds)\n evaluation failed :( (101.60 seconds)\n##[error]Process completed with exit code 22.\n"}, {"step_name": "checks (3.8, typing)/4_Run check.txt", "log": "##[group]Run pip install -U tox\n\u001b[36;1mpip install -U tox\u001b[0m\n\u001b[36;1mtox\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n TOXENV: typing\n##[endgroup]\nCollecting tox\n Downloading tox-4.11.4-py3-none-any.whl.metadata (5.0 kB)\nCollecting cachetools>=5.3.1 (from tox)\n Downloading cachetools-5.3.2-py3-none-any.whl.metadata (5.2 kB)\nCollecting chardet>=5.2 (from tox)\n Downloading chardet-5.2.0-py3-none-any.whl.metadata (3.4 kB)\nCollecting colorama>=0.4.6 (from tox)\n Downloading colorama-0.4.6-py2.py3-none-any.whl (25 kB)\nCollecting filelock>=3.12.3 (from tox)\n Downloading filelock-3.13.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting packaging>=23.1 (from tox)\n Downloading packaging-23.2-py3-none-any.whl.metadata (3.2 kB)\nCollecting platformdirs>=3.10 (from tox)\n Downloading platformdirs-4.1.0-py3-none-any.whl.metadata (11 kB)\nCollecting pluggy>=1.3 (from tox)\n Downloading pluggy-1.3.0-py3-none-any.whl.metadata (4.3 kB)\nCollecting pyproject-api>=1.6.1 (from tox)\n Downloading pyproject_api-1.6.1-py3-none-any.whl.metadata (2.8 kB)\nCollecting tomli>=2.0.1 (from tox)\n Downloading tomli-2.0.1-py3-none-any.whl (12 kB)\nCollecting virtualenv>=20.24.3 (from tox)\n Downloading virtualenv-20.25.0-py3-none-any.whl.metadata (4.5 kB)\nCollecting distlib<1,>=0.3.7 (from virtualenv>=20.24.3->tox)\n Downloading distlib-0.3.7-py2.py3-none-any.whl.metadata (5.1 kB)\nDownloading tox-4.11.4-py3-none-any.whl (153 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 154.0/154.0 kB 18.9 MB/s eta 0:00:00\nDownloading cachetools-5.3.2-py3-none-any.whl (9.3 kB)\nDownloading chardet-5.2.0-py3-none-any.whl (199 kB)\n 
\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 199.4/199.4 kB 47.2 MB/s eta 0:00:00\nDownloading filelock-3.13.1-py3-none-any.whl (11 kB)\nDownloading packaging-23.2-py3-none-any.whl (53 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 53.0/53.0 kB 15.7 MB/s eta 0:00:00\nDownloading platformdirs-4.1.0-py3-none-any.whl (17 kB)\nDownloading pluggy-1.3.0-py3-none-any.whl (18 kB)\nDownloading pyproject_api-1.6.1-py3-none-any.whl (12 kB)\nDownloading virtualenv-20.25.0-py3-none-any.whl (3.8 MB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 3.8/3.8 MB 102.9 MB/s eta 0:00:00\nDownloading distlib-0.3.7-py2.py3-none-any.whl (468 kB)\n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 468.9/468.9 kB 75.6 MB/s eta 0:00:00\nInstalling collected packages: distlib, tomli, pluggy, platformdirs, packaging, filelock, colorama, chardet, cachetools, virtualenv, pyproject-api, tox\nSuccessfully installed cachetools-5.3.2 chardet-5.2.0 colorama-0.4.6 distlib-0.3.7 filelock-3.13.1 packaging-23.2 platformdirs-4.1.0 pluggy-1.3.0 pyproject-api-1.6.1 tomli-2.0.1 tox-4.11.4 virtualenv-20.25.0\n\n[notice] A new release of pip is available: 23.0.1 -> 23.3.1\n[notice] To update, run: pip install --upgrade pip\ntyping: install_deps> python -I -m pip install -ctests/upper-constraints.txt mypy==1.6.1 types-attrs==19.1.0 types-lxml==2023.10.21 types-Pillow==10.1.0.0 types-Pygments==2.16.0.0 types-pyOpenSSL==23.3.0.0 types-setuptools==68.2.0.0 typing-extensions==4.8.0 'w3lib>=2.1.2'\n.pkg: install_requires> python -I -m pip install 'setuptools>=40.8.0' wheel\n.pkg: _optional_hooks> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_sdist> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: get_requires_for_build_wheel> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: install_requires_for_build_wheel> python -I -m pip install wheel\n.pkg: freeze> python -m pip freeze --all\n.pkg: pip==23.3.1,setuptools==69.0.2,wheel==0.42.0\n.pkg: prepare_metadata_for_build_wheel> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n.pkg: build_sdist> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\ntyping: install_package_deps> python -I -m pip install -ctests/upper-constraints.txt 'PyDispatcher>=2.0.5; platform_python_implementation == \"CPython\"' 'PyPyDispatcher>=2.1.0; platform_python_implementation 
== \"PyPy\"' 'Twisted>=18.9.0' 'cryptography>=36.0.0' 'cssselect>=0.9.1' 'itemadapter>=0.1.0' 'itemloaders>=1.0.1' 'lxml>=4.4.1' packaging 'parsel>=1.5.0' 'protego>=0.1.15' 'pyOpenSSL>=21.0.0' 'queuelib>=1.4.2' 'service-identity>=18.1.0' setuptools tldextract 'w3lib>=1.17.0' 'zope.interface>=5.1.0'\ntyping: install_package> python -I -m pip install -ctests/upper-constraints.txt --force-reinstall --no-deps /home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz\ntyping: freeze> python -m pip freeze --all\ntyping: attrs==23.1.0,Automat==22.10.0,certifi==2023.11.17,cffi==1.16.0,charset-normalizer==3.3.2,constantly==23.10.4,cryptography==41.0.7,cssselect==1.2.0,filelock==3.13.1,hyperlink==21.0.0,idna==3.6,incremental==22.10.0,itemadapter==0.8.0,itemloaders==1.1.0,jmespath==1.0.1,lxml==4.9.3,mypy==1.6.1,mypy-extensions==1.0.0,packaging==23.2,parsel==1.8.1,pip==23.3.1,Protego==0.3.0,pyasn1==0.5.1,pyasn1-modules==0.3.0,pycparser==2.21,PyDispatcher==2.0.7,pyOpenSSL==23.3.0,queuelib==1.6.2,requests==2.31.0,requests-file==1.5.1,Scrapy @ file:///home/runner/work/scrapy/scrapy/.tox/.tmp/package/1/Scrapy-2.11.0.tar.gz#sha256=10ffed5c2108f38048c2270acaf9e181014495f11170991e45b186156308910b,service-identity==23.1.0,setuptools==69.0.2,six==1.16.0,tldextract==5.1.1,tomli==2.0.1,Twisted==23.10.0,types-attrs==19.1.0,types-beautifulsoup4==4.12.0.7,types-docutils==0.20.0.3,types-html5lib==1.1.11.15,types-lxml==2023.10.21,types-Pillow==10.1.0.0,types-Pygments==2.16.0.0,types-pyOpenSSL==23.3.0.0,types-setuptools==68.2.0.0,typing_extensions==4.8.0,urllib3==2.1.0,w3lib==2.1.2,wheel==0.42.0,zope.interface==6.1\ntyping: commands[0]> mypy scrapy tests\ntests/test_addons.py:112: error: unindent does not match any outer indentation level [syntax]\nFound 1 error in 1 file (errors prevented further checking)\ntyping: exit 2 (0.47 seconds) /home/runner/work/scrapy/scrapy> mypy scrapy tests pid=1974\n.pkg: _exit> python /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pyproject_api/_backend.py True setuptools.build_meta __legacy__\n typing: FAIL code 2 (17.46=setup[16.99]+cmd[0.47] seconds)\n evaluation failed :( (17.55 seconds)\n##[error]Process completed with exit code 2.\n"}], "diff": "diff --git a/scrapy/addons.py b/scrapy/addons.py\nindex a6efba362..cb1574b2f 100644\n--- a/scrapy/addons.py\n+++ b/scrapy/addons.py\n@@ -33,9 +33,7 @@ class AddonManager:\n try:\n addoncls = load_object(clspath)\n # changes create_instance call to build_from_settings\n- addon = build_from_settings(\n- addoncls, settings=settings\n- )\n+ addon = build_from_settings(addoncls, settings=settings)\n addon.update_settings(settings)\n self.addons.append(addon)\n except NotConfigured as e:\ndiff --git a/scrapy/core/downloader/contextfactory.py b/scrapy/core/downloader/contextfactory.py\nindex 6d3ddd4a4..73a3d32af 100644\n--- a/scrapy/core/downloader/contextfactory.py\n+++ b/scrapy/core/downloader/contextfactory.py\n@@ -178,9 +178,7 @@ def load_context_factory_from_settings(settings, crawler):\n except TypeError:\n # use context factory defaults\n # changes create_instance call to build_from_settings\n- context_factory = build_from_settings(\n- context_factory_cls, settings = settings\n- )\n+ context_factory = build_from_settings(context_factory_cls, settings=settings)\n # context_factory = create_instance(\n # objcls=context_factory_cls,\n # settings=settings,\ndiff --git a/scrapy/core/downloader/handlers/__init__.py b/scrapy/core/downloader/handlers/__init__.py\nindex d5b8ce89d..fc597b017 100644\n--- 
a/scrapy/core/downloader/handlers/__init__.py\n+++ b/scrapy/core/downloader/handlers/__init__.py\n@@ -56,9 +56,7 @@ class DownloadHandlers:\n if skip_lazy and getattr(dhcls, \"lazy\", True):\n return None\n # change create_instance call to build_from_settings\n- dh = build_from_settings(\n- dhcls, settings=self._crawler.settings\n- )\n+ dh = build_from_settings(dhcls, settings=self._crawler.settings)\n # dh = create_instance(\n # objcls=dhcls,\n # settings=self._crawler.settings,\ndiff --git a/scrapy/core/downloader/handlers/http10.py b/scrapy/core/downloader/handlers/http10.py\nindex 76faf7d2b..5c01ba410 100644\n--- a/scrapy/core/downloader/handlers/http10.py\n+++ b/scrapy/core/downloader/handlers/http10.py\n@@ -1,6 +1,6 @@\n \"\"\"Download handlers for http and https schemes\n \"\"\"\n-from scrapy.utils.misc import build_from_settings, build_from_crawler, load_object\n+from scrapy.utils.misc import build_from_crawler, build_from_settings, load_object\n from scrapy.utils.python import to_unicode\n \n \ndiff --git a/scrapy/core/engine.py b/scrapy/core/engine.py\nindex 81aacdf8d..281dc8a54 100644\n--- a/scrapy/core/engine.py\n+++ b/scrapy/core/engine.py\n@@ -358,9 +358,7 @@ class ExecutionEngine:\n raise RuntimeError(f\"No free spider slot when opening {spider.name!r}\")\n logger.info(\"Spider opened\", extra={\"spider\": spider})\n nextcall = CallLaterOnce(self._next_request)\n- scheduler = build_from_crawler(\n- self.scheduler_cls, crawler=self.crawler\n- )\n+ scheduler = build_from_crawler(self.scheduler_cls, crawler=self.crawler)\n start_requests = yield self.scraper.spidermw.process_start_requests(\n start_requests, spider\n )\ndiff --git a/scrapy/crawler.py b/scrapy/crawler.py\nindex a1f699b7e..128095368 100644\n--- a/scrapy/crawler.py\n+++ b/scrapy/crawler.py\n@@ -111,7 +111,8 @@ class Crawler:\n \n # changes create_instance call to build_from_settings\n self.request_fingerprinter = build_from_settings(\n- load_object(self.settings[\"REQUEST_FINGERPRINTER_CLASS\"]), settings=self.settings\n+ load_object(self.settings[\"REQUEST_FINGERPRINTER_CLASS\"]),\n+ settings=self.settings,\n )\n \n # self.request_fingerprinter = create_instance(\ndiff --git a/scrapy/extensions/feedexport.py b/scrapy/extensions/feedexport.py\nindex a4c8a470d..62fb07e19 100644\n--- a/scrapy/extensions/feedexport.py\n+++ b/scrapy/extensions/feedexport.py\n@@ -28,7 +28,7 @@ from scrapy.utils.defer import maybe_deferred_to_future\n from scrapy.utils.deprecate import create_deprecated_class\n from scrapy.utils.ftp import ftp_store_file\n from scrapy.utils.log import failure_to_exc_info\n-from scrapy.utils.misc import build_from_settings, build_from_crawler, load_object\n+from scrapy.utils.misc import build_from_crawler, build_from_settings, load_object\n from scrapy.utils.python import without_none_values\n \n logger = logging.getLogger(__name__)\ndiff --git a/scrapy/utils/misc.py b/scrapy/utils/misc.py\nindex 613328a2b..2e39aaaa3 100644\n--- a/scrapy/utils/misc.py\n+++ b/scrapy/utils/misc.py\n@@ -47,19 +47,21 @@ def arg_to_iter(arg: Any) -> Iterable[Any]:\n return cast(Iterable[Any], arg)\n return [arg]\n \n+\n # def build_from_crawler(objcls, crawler, none, *args, **kwargs):\n-# if crawler \n+# if crawler\n # if hasattr(objcls, \"from_crawler\"):\n # instance = objcls.from_crawler(crawler, *args, **kwargs)\n # method_name = \"from_crawler\"\n-# if instance is None: \n+# if instance is None:\n # raise TypeError(f\"{objcls.__qualname__}.{method_name} returned None)\n-# else: \n+# else:\n # instance = objcls(*args, 
**kwargs)\n # method_name = \"__new__\"\n \n # return instance\n \n+\n def load_object(path: Union[str, Callable]) -> Any:\n \"\"\"Load an object given its absolute object path, and return it.\n \n@@ -176,8 +178,8 @@ def rel_has_nofollow(rel: Optional[str]) -> bool:\n # Raises ``ValueError`` if``crawler`` is``None``.\n # Raises typeError is instance is None\n # Creates a class instance using 'from_crawler' constructor\n-def build_from_crawler(objcls, crawler, /, *args, **kwargs):\n- if crawler is None: \n+def build_from_crawler(objcls, crawler, *args, **kwargs):\n+ if crawler is None:\n raise ValueError(\"Specify crawler.\")\n if crawler and hasattr(objcls, \"from_crawler\"):\n instance = objcls.from_crawler(crawler, *args, **kwargs)\n@@ -188,14 +190,14 @@ def build_from_crawler(objcls, crawler, /, *args, **kwargs):\n if instance is None:\n raise TypeError(f\"{objcls.__qualname__}.{method_name} returned None\")\n return instance\n- \n \n-#``*args`` and ``**kwargs`` are forwarded to the constructors.\n+\n+# ``*args`` and ``**kwargs`` are forwarded to the constructors.\n # Raises ``ValueError`` if``settings`` is``None``.\n # Raises typeError is instance is None\n # Creates a class instance using 'from_settings' constructor\n-def build_from_settings(objcls, settings, /, *args, **kwargs):\n- if settings is None: \n+def build_from_settings(objcls, settings, *args, **kwargs):\n+ if settings is None:\n raise ValueError(\"Specify settings.\")\n if settings and hasattr(objcls, \"from_settings\"):\n instance = objcls.from_settings(settings, *args, **kwargs)\n@@ -208,8 +210,6 @@ def build_from_settings(objcls, settings, /, *args, **kwargs):\n return instance\n \n \n-\n-\n @contextmanager\n def set_environ(**kwargs: str) -> Generator[None, Any, None]:\n \"\"\"Temporarily set environment variables inside the context manager and\ndiff --git a/tests/test_addons.py b/tests/test_addons.py\nindex 355539162..5e390c49d 100644\n--- a/tests/test_addons.py\n+++ b/tests/test_addons.py\n@@ -109,7 +109,7 @@ class AddonManagerTest(unittest.TestCase):\n self.assertIsInstance(manager.addons[0], CreateInstanceAddon)\n self.assertEqual(crawler.settings.get(\"MYADDON_KEY\"), \"val\")\n \n- def test_build_from_crawler(self):\n+ def test_build_from_crawler(self):\n settings_dict = {\n \"ADDONS\": {\"tests.test_addons.CreateInstanceAddon\": 0},\n \"MYADDON\": {\"MYADDON_KEY\": \"val\"},\ndiff --git a/tests/test_downloader_handlers.py b/tests/test_downloader_handlers.py\nindex 8595e68c2..37a53643b 100644\n--- a/tests/test_downloader_handlers.py\n+++ b/tests/test_downloader_handlers.py\n@@ -669,9 +669,7 @@ class Https11CustomCiphers(unittest.TestCase):\n crawler = get_crawler(\n settings_dict={\"DOWNLOADER_CLIENT_TLS_CIPHERS\": \"CAMELLIA256-SHA\"}\n )\n- self.download_handler = build_from_crawler(\n- self.download_handler_cls, crawler\n- )\n+ self.download_handler = build_from_crawler(self.download_handler_cls, crawler)\n self.download_request = self.download_handler.download_request\n \n @defer.inlineCallbacks\n@@ -1036,9 +1034,7 @@ class BaseFTPTestCase(unittest.TestCase):\n self.port = reactor.listenTCP(0, self.factory, interface=\"127.0.0.1\")\n self.portNum = self.port.getHost().port\n crawler = get_crawler()\n- self.download_handler = build_from_crawler(\n- FTPDownloadHandler, crawler\n- )\n+ self.download_handler = build_from_crawler(FTPDownloadHandler, crawler)\n self.addCleanup(self.port.stopListening)\n \n def tearDown(self):\n@@ -1182,9 +1178,7 @@ class AnonymousFTPTestCase(BaseFTPTestCase):\n self.port = 
reactor.listenTCP(0, self.factory, interface=\"127.0.0.1\")\n self.portNum = self.port.getHost().port\n crawler = get_crawler()\n- self.download_handler = build_from_crawler(\n- FTPDownloadHandler, crawler\n- )\n+ self.download_handler = build_from_crawler(FTPDownloadHandler, crawler)\n self.addCleanup(self.port.stopListening)\n \n def tearDown(self):\n@@ -1194,9 +1188,7 @@ class AnonymousFTPTestCase(BaseFTPTestCase):\n class DataURITestCase(unittest.TestCase):\n def setUp(self):\n crawler = get_crawler()\n- self.download_handler = build_from_crawler(\n- DataURIDownloadHandler, crawler\n- )\n+ self.download_handler = build_from_crawler(DataURIDownloadHandler, crawler)\n self.download_request = self.download_handler.download_request\n self.spider = Spider(\"foo\")\n \ndiff --git a/tests/test_utils_misc/__init__.py b/tests/test_utils_misc/__init__.py\nindex ccf8022c9..0152a1110 100644\n--- a/tests/test_utils_misc/__init__.py\n+++ b/tests/test_utils_misc/__init__.py\n@@ -95,6 +95,7 @@ class UtilsMiscTestCase(unittest.TestCase):\n self.assertEqual(\n list(arg_to_iter(TestItem(name=\"john\"))), [TestItem(name=\"john\")]\n )\n+\n def test_build_from_crawler(self):\n crawler = mock.MagicMock(spec_set=[\"settings\"])\n args = (True, 100.0)\n@@ -122,7 +123,7 @@ class UtilsMiscTestCase(unittest.TestCase):\n )\n for specs in spec_sets:\n m = mock.MagicMock(spec_set=specs)\n- _test_with_crawler(m, settings, crawler)\n+ _test_with_crawler(m, crawler)\n m.reset_mock()\n \n # Check adoption of crawler\n@@ -130,7 +131,7 @@ class UtilsMiscTestCase(unittest.TestCase):\n m.from_crawler.return_value = None\n with self.assertRaises(TypeError):\n build_from_crawler(m, crawler, *args, **kwargs)\n- \n+\n def test_build_from_settings(self):\n settings = mock.MagicMock()\n args = (True, 100.0)\ndiff --git a/tests/test_webclient.py b/tests/test_webclient.py\nindex 0042fe8f0..a69d9c1b0 100644\n--- a/tests/test_webclient.py\n+++ b/tests/test_webclient.py\n@@ -24,7 +24,7 @@ from scrapy.core.downloader import webclient as client\n from scrapy.core.downloader.contextfactory import ScrapyClientContextFactory\n from scrapy.http import Headers, Request\n from scrapy.settings import Settings\n-from scrapy.utils.misc import create_instance\n+from scrapy.utils.misc import build_from_settings\n from scrapy.utils.python import to_bytes, to_unicode\n from tests.mockserver import (\n BrokenDownloadResource,\n@@ -470,8 +470,8 @@ class WebClientCustomCiphersSSLTestCase(WebClientSSLTestCase):\n def testPayload(self):\n s = \"0123456789\" * 10\n settings = Settings({\"DOWNLOADER_CLIENT_TLS_CIPHERS\": self.custom_ciphers})\n- client_context_factory = create_instance(\n- ScrapyClientContextFactory, settings=settings, crawler=None\n+ client_context_factory = build_from_settings(\n+ ScrapyClientContextFactory, settings=settings\n )\n return getPage(\n self.getURL(\"payload\"), body=s, contextFactory=client_context_factory\n@@ -482,8 +482,8 @@ class WebClientCustomCiphersSSLTestCase(WebClientSSLTestCase):\n settings = Settings(\n {\"DOWNLOADER_CLIENT_TLS_CIPHERS\": \"ECDHE-RSA-AES256-GCM-SHA384\"}\n )\n- client_context_factory = create_instance(\n- ScrapyClientContextFactory, settings=settings, crawler=None\n+ client_context_factory = build_from_settings(\n+ ScrapyClientContextFactory, settings=settings\n )\n d = getPage(\n self.getURL(\"payload\"), body=s, contextFactory=client_context_factory\n", "difficulty": 0, "changed_files": ["scrapy/addons.py", "scrapy/core/downloader/contextfactory.py", 
"scrapy/core/downloader/handlers/__init__.py", "scrapy/core/downloader/handlers/http10.py", "scrapy/core/engine.py", "scrapy/crawler.py", "scrapy/extensions/feedexport.py", "scrapy/utils/misc.py", "tests/test_addons.py", "tests/test_downloader_handlers.py", "tests/test_utils_misc/__init__.py", "tests/test_webclient.py"], "commit_link": "https://github.com/scrapy/scrapy/tree/b15d4bd9177149b88d1b0f719e7e6290df81fe9a"}
data/python/b4cd344.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 4, "repo_owner": "mikel-brostrom", "repo_name": "yolo_tracking", "head_branch": "master", "workflow_name": "CI CPU testing", "workflow_filename": "ci.yml", "workflow_path": ".github/workflows/ci.yml", "contributor": "mikel-brostrom", "sha_fail": "b4cd344b4fd4316112beb324fe04d703fd2b7254", "sha_success": "075bec6fa4288a82f3833b03183819f0494d66b3", "workflow": "# name of the workflow, what it is doing (optional)\nname: CI CPU testing\n\n# events that trigger the workflow (required)\non:\n push:\n branches: [master, CIdebug]\n pull_request:\n # pull request where master is target\n branches: [master]\n\nenv:\n # Directory of PyPi package to be tested\n PACKAGE_DIR: boxmot\n # Minimum acceptable test coverage\n # Increase as you add more tests to increase coverage\n COVERAGE_FAIL_UNDER: 29\n\n# the workflow that gets triggerd\njobs:\n build:\n runs-on: ${{ matrix.os }}\n strategy:\n fail-fast: false\n matrix:\n os: [ubuntu-latest] # skip windows-latest for\n python-version: ['3.8', '3.9', '3.10']\n #model: ['yolov8n', 'yolo_nas_s', yolox_n] # yolo models to test\n #tracking-methods: ['deepocsort', 'ocsort', 'botsort', 'strongsort', 'bytetrack'] # tracking methods to test\n\n # Timeout: https://stackoverflow.com/a/59076067/4521646\n timeout-minutes: 50\n steps:\n\n - uses: actions/checkout@v4 # Check out the repository\n - uses: actions/setup-python@v5 # Prepare environment with python 3.9\n with:\n python-version: ${{ matrix.python-version }}\n cache: 'pip' # caching pip dependencies\n - name: Install requirements\n shell: bash # for Windows compatibility\n run: |\n python -m pip install --upgrade pip setuptools wheel\n pip install -e . pytest pytest-cov --extra-index-url https://download.pytorch.org/whl/cpu\n python --version\n pip --version\n pip list\n\n - name: Tests all tracking options\n shell: bash # for Windows compatibility\n env:\n IMG: ./assets/MOT17-mini/train/MOT17-05-FRCNN/img1/000001.jpg\n run: |\n # deepocsort fro all supported yolo models\n python examples/track.py --tracking-method deepocsort --source $IMG --imgsz 320\n python examples/track.py --yolo-model yolo_nas_s --tracking-method deepocsort --source $IMG --imgsz 320\n # python examples/track.py --yolo-model yolox_n --tracking-method deepocsort --source $IMG --imgsz 320\n\n # hybridsort\n python examples/track.py --tracking-method hybridsort --source $IMG --imgsz 320\n\n # botsort\n python examples/track.py --tracking-method botsort --source $IMG --imgsz 320\n\n # strongsort\n python examples/track.py --tracking-method strongsort --source $IMG --imgsz 320\n\n # ocsort\n python examples/track.py --tracking-method ocsort --source $IMG --imgsz 320\n\n # bytetrack\n python examples/track.py --tracking-method bytetrack --source $IMG --imgsz 320\n\n - name: Pytest tests # after tracking options as this does not download models\n shell: bash # for Windows compatibility\n run: |\n\n # needed in TFLite export\n wget https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\n tar -zxvf flatc.tar.gz\n sudo chmod +x flatc\n sudo mv flatc /usr/bin/\n\n pytest --cov=$PACKAGE_DIR --cov-report=html -v tests\n coverage report --fail-under=$COVERAGE_FAIL_UNDER\n\n - name: Tests exported reid models\n env:\n IMG: ./assets/MOT17-mini/train/MOT17-05-FRCNN/img1/000001.jpg\n shell: bash # for Windows compatibility\n run: |\n\n # test exported reid model\n python examples/track.py --reid-model examples/weights/osnet_x0_25_msmt17.torchscript --source $IMG --imgsz 320\n python examples/track.py 
--reid-model examples/weights/osnet_x0_25_msmt17.onnx --source $IMG --imgsz 320\n #python examples/track.py --reid-model examples/weights/osnet_x0_25_msmt17_saved_model/osnet_x0_25_msmt17_float16.tflite --source $IMG --imgsz 320\n python examples/track.py --reid-model examples/weights/osnet_x0_25_msmt17_openvino_model --source $IMG --imgsz 320\n\n - name: Test tracking with seg models\n env:\n IMG: ./assets/MOT17-mini/train/MOT17-05-FRCNN/img1/000001.jpg\n shell: bash # for Windows compatibility\n run: |\n # tracking with SEG models\n python examples/track.py --tracking-method deepocsort --yolo-model yolov8n-seg.pt --source $IMG\n\n - name: Test tracking with pose models\n env:\n IMG: ./assets/MOT17-mini/train/MOT17-05-FRCNN/img1/000001.jpg\n shell: bash # for Windows compatibility\n run: |\n # tracking with POSE models\n python3 examples/track.py --yolo-model weights/yolov8n.pt --source $IMG --imgsz 320\n\n - name: Test validation on MOT17 subset\n shell: bash # for Windows compatibility\n run: |\n # validation on a few MOT17 imges\n python examples/val.py --tracking-method deepocsort --yolo-model yolov8n.pt --benchmark MOT17-mini --imgsz 320 --conf 0.2\n\n - name: Test evolution on MOT17 subset\n shell: bash # for Windows compatibility\n run: |\n # evolve a for a single set of parameters\n python examples/evolve.py --objectives HOTA,MOTA,IDF1 --benchmark MOT17-mini --n-trials 1 --imgsz 320 --conf 0.2\n", "logs": [{"step_name": "build (ubuntu-latest, 3.8)/6_Pytest tests.txt", "log": "##[group]Run # needed in TFLite export\n\u001b[36;1m\u001b[0m\n\u001b[36;1m# needed in TFLite export\u001b[0m\n\u001b[36;1mwget https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\u001b[0m\n\u001b[36;1mtar -zxvf flatc.tar.gz\u001b[0m\n\u001b[36;1msudo chmod +x flatc\u001b[0m\n\u001b[36;1msudo mv flatc /usr/bin/\u001b[0m\n\u001b[36;1m\u001b[0m\n\u001b[36;1mpytest --cov=$PACKAGE_DIR --cov-report=html -v tests\u001b[0m\n\u001b[36;1mcoverage report --fail-under=$COVERAGE_FAIL_UNDER\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n PACKAGE_DIR: boxmot\n COVERAGE_FAIL_UNDER: 29\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\n--2024-01-12 21:22:49-- https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\nResolving github.com (github.com)... 140.82.113.4\nConnecting to github.com (github.com)|140.82.113.4|:443... connected.\nHTTP request sent, awaiting response... 
302 Found\nLocation: https://objects.githubusercontent.com/github-production-release-asset-2e65be/541831874/29499355-44ab-4fb6-86c8-582f4bad68a3?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20240112%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240112T212250Z&X-Amz-Expires=300&X-Amz-Signature=0bb64fda387269a4ce4fae58a47108dda06e20cbe0b749718c07eb4d3da60221&X-Amz-SignedHeaders=host&actor_id=0&key_id=0&repo_id=541831874&response-content-disposition=attachment%3B%20filename%3Dflatc.tar.gz&response-content-type=application%2Foctet-stream [following]\n--2024-01-12 21:22:50-- https://objects.githubusercontent.com/github-production-release-asset-2e65be/541831874/29499355-44ab-4fb6-86c8-582f4bad68a3?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20240112%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240112T212250Z&X-Amz-Expires=300&X-Amz-Signature=0bb64fda387269a4ce4fae58a47108dda06e20cbe0b749718c07eb4d3da60221&X-Amz-SignedHeaders=host&actor_id=0&key_id=0&repo_id=541831874&response-content-disposition=attachment%3B%20filename%3Dflatc.tar.gz&response-content-type=application%2Foctet-stream\nResolving objects.githubusercontent.com (objects.githubusercontent.com)... 185.199.108.133, 185.199.111.133, 185.199.110.133, ...\nConnecting to objects.githubusercontent.com (objects.githubusercontent.com)|185.199.108.133|:443... connected.\nHTTP request sent, awaiting response... 200 OK\nLength: 1382707 (1.3M) [application/octet-stream]\nSaving to: \u2018flatc.tar.gz\u2019\n\n 0K .......... .......... .......... .......... .......... 3% 13.7M 0s\n 50K .......... .......... .......... .......... .......... 7% 30.9M 0s\n 100K .......... .......... .......... .......... .......... 11% 129M 0s\n 150K .......... .......... .......... .......... .......... 14% 176M 0s\n 200K .......... .......... .......... .......... .......... 18% 46.6M 0s\n 250K .......... .......... .......... .......... .......... 22% 174M 0s\n 300K .......... .......... .......... .......... .......... 25% 142M 0s\n 350K .......... .......... .......... .......... .......... 29% 206M 0s\n 400K .......... .......... .......... .......... .......... 33% 256M 0s\n 450K .......... .......... .......... .......... .......... 37% 121M 0s\n 500K .......... .......... .......... .......... .......... 40% 171M 0s\n 550K .......... .......... .......... .......... .......... 44% 137M 0s\n 600K .......... .......... .......... .......... .......... 48% 185M 0s\n 650K .......... .......... .......... .......... .......... 51% 163M 0s\n 700K .......... .......... .......... .......... .......... 55% 129M 0s\n 750K .......... .......... .......... .......... .......... 59% 163M 0s\n 800K .......... .......... .......... .......... .......... 62% 175M 0s\n 850K .......... .......... .......... .......... .......... 66% 246M 0s\n 900K .......... .......... .......... .......... .......... 70% 150M 0s\n 950K .......... .......... .......... .......... .......... 74% 257M 0s\n 1000K .......... .......... .......... .......... .......... 77% 251M 0s\n 1050K .......... .......... .......... .......... .......... 81% 232M 0s\n 1100K .......... .......... .......... .......... .......... 85% 255M 0s\n 1150K .......... .......... .......... .......... .......... 88% 257M 0s\n 1200K .......... .......... .......... .......... .......... 92% 258M 0s\n 1250K .......... .......... .......... .......... .......... 96% 222M 0s\n 1300K .......... .......... .......... .......... .......... 
99% 260M 0s\n 1350K 100% 572G=0.01s\n\n2024-01-12 21:22:50 (105 MB/s) - \u2018flatc.tar.gz\u2019 saved [1382707/1382707]\n\nflatc\n============================= test session starts ==============================\nplatform linux -- Python 3.8.18, pytest-7.4.4, pluggy-1.3.0 -- /opt/hostedtoolcache/Python/3.8.18/x64/bin/python\ncachedir: .pytest_cache\nrootdir: /home/runner/work/yolo_tracking/yolo_tracking\nplugins: hydra-core-1.3.2, cov-4.1.0\ncollecting ... collected 14 items\n\ntests/test_exports.py::test_export_torchscript PASSED [ 7%]\ntests/test_exports.py::test_export_onnx PASSED [ 14%]\ntests/test_exports.py::test_export_openvino PASSED [ 21%]\ntests/test_python.py::test_strongsort_instantiation PASSED [ 28%]\ntests/test_python.py::test_botsort_instantiation PASSED [ 35%]\ntests/test_python.py::test_deepocsort_instantiation PASSED [ 42%]\ntests/test_python.py::test_ocsort_instantiation PASSED [ 50%]\ntests/test_python.py::test_bytetrack_instantiation PASSED [ 57%]\ntests/test_python.py::test_deepocsort_output FAILED [ 64%]\ntests/test_python.py::test_ocsort_output PASSED [ 71%]\ntests/test_python.py::test_botsort_output PASSED [ 78%]\ntests/test_python.py::test_bytetrack_output PASSED [ 85%]\ntests/test_python.py::test_strongsort_output PASSED [ 92%]\ntests/test_python.py::test_gsi PASSED [100%]\n\n=================================== FAILURES ===================================\n____________________________ test_deepocsort_output ____________________________\n\n def test_deepocsort_output():\n tracker_conf = get_tracker_config('deepocsort')\n> tracker = create_tracker(\n tracker_type='deepocsort',\n tracker_config=tracker_conf,\n reid_weights=WEIGHTS / 'mobilenetv2_x1_4_dukemtmcreid.pt',\n device='cpu',\n half=False,\n per_class=False,\n asso_func=\"centroid\"\n )\nE TypeError: create_tracker() got an unexpected keyword argument 'asso_func'\n\ntests/test_python.py:53: TypeError\n=============================== warnings summary ===============================\nboxmot/utils/checks.py:5\n /home/runner/work/yolo_tracking/yolo_tracking/boxmot/utils/checks.py:5: DeprecationWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html\n import pkg_resources\n\n../../../../../opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('mpl_toolkits')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\n../../../../../opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('google')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. 
See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\n../../../../../opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('sphinxcontrib')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\ntests/test_exports.py::test_export_onnx\n /home/runner/work/yolo_tracking/yolo_tracking/boxmot/appearance/backbones/osnet.py:389: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs!\n if return_featuremaps:\n\ntests/test_exports.py::test_export_openvino\n /opt/hostedtoolcache/Python/3.8.18/x64/lib/python3.8/site-packages/defusedxml/__init__.py:30: DeprecationWarning: defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead.\n from . import cElementTree\n\n-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html\n\n---------- coverage: platform linux, python 3.8.18-final-0 -----------\nCoverage HTML written to dir htmlcov\n\n=========================== short test summary info ============================\nFAILED tests/test_python.py::test_deepocsort_output - TypeError: create_tracker() got an unexpected keyword argument 'asso_func'\n================== 1 failed, 13 passed, 9 warnings in 31.50s ===================\n##[error]Process completed with exit code 1.\n"}, {"step_name": "build (ubuntu-latest, 3.9)/6_Pytest tests.txt", "log": "##[group]Run # needed in TFLite export\n\u001b[36;1m\u001b[0m\n\u001b[36;1m# needed in TFLite export\u001b[0m\n\u001b[36;1mwget https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\u001b[0m\n\u001b[36;1mtar -zxvf flatc.tar.gz\u001b[0m\n\u001b[36;1msudo chmod +x flatc\u001b[0m\n\u001b[36;1msudo mv flatc /usr/bin/\u001b[0m\n\u001b[36;1m\u001b[0m\n\u001b[36;1mpytest --cov=$PACKAGE_DIR --cov-report=html -v tests\u001b[0m\n\u001b[36;1mcoverage report --fail-under=$COVERAGE_FAIL_UNDER\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n PACKAGE_DIR: boxmot\n COVERAGE_FAIL_UNDER: 29\n pythonLocation: /opt/hostedtoolcache/Python/3.9.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.9.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.9.18/x64/lib\n##[endgroup]\n--2024-01-12 21:22:56-- https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\nResolving github.com (github.com)... 140.82.112.3\nConnecting to github.com (github.com)|140.82.112.3|:443... 
connected.\nHTTP request sent, awaiting response... 302 Found\nLocation: https://objects.githubusercontent.com/github-production-release-asset-2e65be/541831874/29499355-44ab-4fb6-86c8-582f4bad68a3?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20240112%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240112T212256Z&X-Amz-Expires=300&X-Amz-Signature=c2784e798a8e2dc3af201147af969b33ab7179077dd2751f9e53a641373be144&X-Amz-SignedHeaders=host&actor_id=0&key_id=0&repo_id=541831874&response-content-disposition=attachment%3B%20filename%3Dflatc.tar.gz&response-content-type=application%2Foctet-stream [following]\n--2024-01-12 21:22:56-- https://objects.githubusercontent.com/github-production-release-asset-2e65be/541831874/29499355-44ab-4fb6-86c8-582f4bad68a3?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20240112%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240112T212256Z&X-Amz-Expires=300&X-Amz-Signature=c2784e798a8e2dc3af201147af969b33ab7179077dd2751f9e53a641373be144&X-Amz-SignedHeaders=host&actor_id=0&key_id=0&repo_id=541831874&response-content-disposition=attachment%3B%20filename%3Dflatc.tar.gz&response-content-type=application%2Foctet-stream\nResolving objects.githubusercontent.com (objects.githubusercontent.com)... 185.199.108.133, 185.199.110.133, 185.199.109.133, ...\nConnecting to objects.githubusercontent.com (objects.githubusercontent.com)|185.199.108.133|:443... connected.\nHTTP request sent, awaiting response... 200 OK\nLength: 1382707 (1.3M) [application/octet-stream]\nSaving to: \u2018flatc.tar.gz\u2019\n\n 0K .......... .......... .......... .......... .......... 3% 32.7M 0s\n 50K .......... .......... .......... .......... .......... 7% 31.0M 0s\n 100K .......... .......... .......... .......... .......... 11% 79.8M 0s\n 150K .......... .......... .......... .......... .......... 14% 103M 0s\n 200K .......... .......... .......... .......... .......... 18% 177M 0s\n 250K .......... .......... .......... .......... .......... 22% 142M 0s\n 300K .......... .......... .......... .......... .......... 25% 113M 0s\n 350K .......... .......... .......... .......... .......... 29% 86.2M 0s\n 400K .......... .......... .......... .......... .......... 33% 141M 0s\n 450K .......... .......... .......... .......... .......... 37% 160M 0s\n 500K .......... .......... .......... .......... .......... 40% 100M 0s\n 550K .......... .......... .......... .......... .......... 44% 130M 0s\n 600K .......... .......... .......... .......... .......... 48% 157M 0s\n 650K .......... .......... .......... .......... .......... 51% 213M 0s\n 700K .......... .......... .......... .......... .......... 55% 215M 0s\n 750K .......... .......... .......... .......... .......... 59% 210M 0s\n 800K .......... .......... .......... .......... .......... 62% 68.8M 0s\n 850K .......... .......... .......... .......... .......... 66% 172M 0s\n 900K .......... .......... .......... .......... .......... 70% 162M 0s\n 950K .......... .......... .......... .......... .......... 74% 235M 0s\n 1000K .......... .......... .......... .......... .......... 77% 166M 0s\n 1050K .......... .......... .......... .......... .......... 81% 176M 0s\n 1100K .......... .......... .......... .......... .......... 85% 123M 0s\n 1150K .......... .......... .......... .......... .......... 88% 182M 0s\n 1200K .......... .......... .......... .......... .......... 92% 133M 0s\n 1250K .......... .......... .......... .......... .......... 96% 136M 0s\n 1300K .......... .......... .......... 
.......... .......... 99% 155M 0s\n 1350K 100% 572G=0.01s\n\n2024-01-12 21:22:56 (109 MB/s) - \u2018flatc.tar.gz\u2019 saved [1382707/1382707]\n\nflatc\n============================= test session starts ==============================\nplatform linux -- Python 3.9.18, pytest-7.4.4, pluggy-1.3.0 -- /opt/hostedtoolcache/Python/3.9.18/x64/bin/python\ncachedir: .pytest_cache\nrootdir: /home/runner/work/yolo_tracking/yolo_tracking\nplugins: hydra-core-1.3.2, cov-4.1.0\ncollecting ... collected 14 items\n\ntests/test_exports.py::test_export_torchscript PASSED [ 7%]\ntests/test_exports.py::test_export_onnx PASSED [ 14%]\ntests/test_exports.py::test_export_openvino PASSED [ 21%]\ntests/test_python.py::test_strongsort_instantiation PASSED [ 28%]\ntests/test_python.py::test_botsort_instantiation PASSED [ 35%]\ntests/test_python.py::test_deepocsort_instantiation PASSED [ 42%]\ntests/test_python.py::test_ocsort_instantiation PASSED [ 50%]\ntests/test_python.py::test_bytetrack_instantiation PASSED [ 57%]\ntests/test_python.py::test_deepocsort_output FAILED [ 64%]\ntests/test_python.py::test_ocsort_output PASSED [ 71%]\ntests/test_python.py::test_botsort_output PASSED [ 78%]\ntests/test_python.py::test_bytetrack_output PASSED [ 85%]\ntests/test_python.py::test_strongsort_output PASSED [ 92%]\ntests/test_python.py::test_gsi PASSED [100%]\n\n=================================== FAILURES ===================================\n____________________________ test_deepocsort_output ____________________________\n\n def test_deepocsort_output():\n tracker_conf = get_tracker_config('deepocsort')\n> tracker = create_tracker(\n tracker_type='deepocsort',\n tracker_config=tracker_conf,\n reid_weights=WEIGHTS / 'mobilenetv2_x1_4_dukemtmcreid.pt',\n device='cpu',\n half=False,\n per_class=False,\n asso_func=\"centroid\"\n )\nE TypeError: create_tracker() got an unexpected keyword argument 'asso_func'\n\ntests/test_python.py:53: TypeError\n=============================== warnings summary ===============================\nboxmot/utils/checks.py:5\n /home/runner/work/yolo_tracking/yolo_tracking/boxmot/utils/checks.py:5: DeprecationWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html\n import pkg_resources\n\n../../../../../opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('google')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. 
See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\n../../../../../opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('sphinxcontrib')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\ntests/test_exports.py::test_export_onnx\n /home/runner/work/yolo_tracking/yolo_tracking/boxmot/appearance/backbones/osnet.py:389: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs!\n if return_featuremaps:\n\ntests/test_exports.py::test_export_openvino\n /opt/hostedtoolcache/Python/3.9.18/x64/lib/python3.9/site-packages/defusedxml/__init__.py:30: DeprecationWarning: defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead.\n from . import cElementTree\n\n-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html\n\n---------- coverage: platform linux, python 3.9.18-final-0 -----------\nCoverage HTML written to dir htmlcov\n\n=========================== short test summary info ============================\nFAILED tests/test_python.py::test_deepocsort_output - TypeError: create_tracker() got an unexpected keyword argument 'asso_func'\n================== 1 failed, 13 passed, 8 warnings in 32.20s ===================\n##[error]Process completed with exit code 1.\n"}, {"step_name": "build (ubuntu-latest, 3.10)/6_Pytest tests.txt", "log": "##[group]Run # needed in TFLite export\n\u001b[36;1m\u001b[0m\n\u001b[36;1m# needed in TFLite export\u001b[0m\n\u001b[36;1mwget https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\u001b[0m\n\u001b[36;1mtar -zxvf flatc.tar.gz\u001b[0m\n\u001b[36;1msudo chmod +x flatc\u001b[0m\n\u001b[36;1msudo mv flatc /usr/bin/\u001b[0m\n\u001b[36;1m\u001b[0m\n\u001b[36;1mpytest --cov=$PACKAGE_DIR --cov-report=html -v tests\u001b[0m\n\u001b[36;1mcoverage report --fail-under=$COVERAGE_FAIL_UNDER\u001b[0m\nshell: /usr/bin/bash --noprofile --norc -e -o pipefail {0}\nenv:\n PACKAGE_DIR: boxmot\n COVERAGE_FAIL_UNDER: 29\n pythonLocation: /opt/hostedtoolcache/Python/3.10.13/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.10.13/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.10.13/x64/lib\n##[endgroup]\n--2024-01-12 21:23:22-- https://github.com/PINTO0309/onnx2tf/releases/download/1.16.31/flatc.tar.gz\nResolving github.com (github.com)... 192.30.255.113\nConnecting to github.com (github.com)|192.30.255.113|:443... 
connected.\nHTTP request sent, awaiting response... 302 Found\nLocation: https://objects.githubusercontent.com/github-production-release-asset-2e65be/541831874/29499355-44ab-4fb6-86c8-582f4bad68a3?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20240112%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240112T212323Z&X-Amz-Expires=300&X-Amz-Signature=9a9df5c41bff888036ebd61dd2ceef9adb6937dc39984e23a2a2018e1678ed8f&X-Amz-SignedHeaders=host&actor_id=0&key_id=0&repo_id=541831874&response-content-disposition=attachment%3B%20filename%3Dflatc.tar.gz&response-content-type=application%2Foctet-stream [following]\n--2024-01-12 21:23:23-- https://objects.githubusercontent.com/github-production-release-asset-2e65be/541831874/29499355-44ab-4fb6-86c8-582f4bad68a3?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20240112%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240112T212323Z&X-Amz-Expires=300&X-Amz-Signature=9a9df5c41bff888036ebd61dd2ceef9adb6937dc39984e23a2a2018e1678ed8f&X-Amz-SignedHeaders=host&actor_id=0&key_id=0&repo_id=541831874&response-content-disposition=attachment%3B%20filename%3Dflatc.tar.gz&response-content-type=application%2Foctet-stream\nResolving objects.githubusercontent.com (objects.githubusercontent.com)... 185.199.110.133, 185.199.108.133, 185.199.109.133, ...\nConnecting to objects.githubusercontent.com (objects.githubusercontent.com)|185.199.110.133|:443... connected.\nHTTP request sent, awaiting response... 200 OK\nLength: 1382707 (1.3M) [application/octet-stream]\nSaving to: \u2018flatc.tar.gz\u2019\n\n 0K .......... .......... .......... .......... .......... 3% 18.7M 0s\n 50K .......... .......... .......... .......... .......... 7% 71.0M 0s\n 100K .......... .......... .......... .......... .......... 11% 58.4M 0s\n 150K .......... .......... .......... .......... .......... 14% 115M 0s\n 200K .......... .......... .......... .......... .......... 18% 143M 0s\n 250K .......... .......... .......... .......... .......... 22% 68.8M 0s\n 300K .......... .......... .......... .......... .......... 25% 143M 0s\n 350K .......... .......... .......... .......... .......... 29% 109M 0s\n 400K .......... .......... .......... .......... .......... 33% 158M 0s\n 450K .......... .......... .......... .......... .......... 37% 107M 0s\n 500K .......... .......... .......... .......... .......... 40% 163M 0s\n 550K .......... .......... .......... .......... .......... 44% 125M 0s\n 600K .......... .......... .......... .......... .......... 48% 111M 0s\n 650K .......... .......... .......... .......... .......... 51% 151M 0s\n 700K .......... .......... .......... .......... .......... 55% 118M 0s\n 750K .......... .......... .......... .......... .......... 59% 152M 0s\n 800K .......... .......... .......... .......... .......... 62% 199M 0s\n 850K .......... .......... .......... .......... .......... 66% 204M 0s\n 900K .......... .......... .......... .......... .......... 70% 207M 0s\n 950K .......... .......... .......... .......... .......... 74% 186M 0s\n 1000K .......... .......... .......... .......... .......... 77% 199M 0s\n 1050K .......... .......... .......... .......... .......... 81% 210M 0s\n 1100K .......... .......... .......... .......... .......... 85% 193M 0s\n 1150K .......... .......... .......... .......... .......... 88% 208M 0s\n 1200K .......... .......... .......... .......... .......... 92% 210M 0s\n 1250K .......... .......... .......... .......... .......... 96% 203M 0s\n 1300K .......... .......... .......... 
.......... .......... 99% 196M 0s\n 1350K 100% 572G=0.01s\n\n2024-01-12 21:23:23 (110 MB/s) - \u2018flatc.tar.gz\u2019 saved [1382707/1382707]\n\nflatc\n============================= test session starts ==============================\nplatform linux -- Python 3.10.13, pytest-7.4.4, pluggy-1.3.0 -- /opt/hostedtoolcache/Python/3.10.13/x64/bin/python\ncachedir: .pytest_cache\nrootdir: /home/runner/work/yolo_tracking/yolo_tracking\nplugins: hydra-core-1.3.2, cov-4.1.0\ncollecting ... collected 14 items\n\ntests/test_exports.py::test_export_torchscript PASSED [ 7%]\ntests/test_exports.py::test_export_onnx PASSED [ 14%]\ntests/test_exports.py::test_export_openvino PASSED [ 21%]\ntests/test_python.py::test_strongsort_instantiation PASSED [ 28%]\ntests/test_python.py::test_botsort_instantiation PASSED [ 35%]\ntests/test_python.py::test_deepocsort_instantiation PASSED [ 42%]\ntests/test_python.py::test_ocsort_instantiation PASSED [ 50%]\ntests/test_python.py::test_bytetrack_instantiation PASSED [ 57%]\ntests/test_python.py::test_deepocsort_output FAILED [ 64%]\ntests/test_python.py::test_ocsort_output PASSED [ 71%]\ntests/test_python.py::test_botsort_output PASSED [ 78%]\ntests/test_python.py::test_bytetrack_output PASSED [ 85%]\ntests/test_python.py::test_strongsort_output PASSED [ 92%]\ntests/test_python.py::test_gsi PASSED [100%]\n\n=================================== FAILURES ===================================\n____________________________ test_deepocsort_output ____________________________\n\n def test_deepocsort_output():\n tracker_conf = get_tracker_config('deepocsort')\n> tracker = create_tracker(\n tracker_type='deepocsort',\n tracker_config=tracker_conf,\n reid_weights=WEIGHTS / 'mobilenetv2_x1_4_dukemtmcreid.pt',\n device='cpu',\n half=False,\n per_class=False,\n asso_func=\"centroid\"\n )\nE TypeError: create_tracker() got an unexpected keyword argument 'asso_func'\n\ntests/test_python.py:53: TypeError\n=============================== warnings summary ===============================\nboxmot/utils/checks.py:5\n /home/runner/work/yolo_tracking/yolo_tracking/boxmot/utils/checks.py:5: DeprecationWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html\n import pkg_resources\n\n../../../../../opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('google')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. 
See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\n../../../../../opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868\n../../../../../opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868\n /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pkg_resources/__init__.py:2868: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('sphinxcontrib')`.\n Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages\n declare_namespace(pkg)\n\ntests/test_exports.py::test_export_onnx\n /home/runner/work/yolo_tracking/yolo_tracking/boxmot/appearance/backbones/osnet.py:389: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs!\n if return_featuremaps:\n\ntests/test_exports.py::test_export_openvino\n /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/defusedxml/__init__.py:30: DeprecationWarning: defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead.\n from . import cElementTree\n\n-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html\n\n---------- coverage: platform linux, python 3.10.13-final-0 ----------\nCoverage HTML written to dir htmlcov\n\n=========================== short test summary info ============================\nFAILED tests/test_python.py::test_deepocsort_output - TypeError: create_tracker() got an unexpected keyword argument 'asso_func'\n================== 1 failed, 13 passed, 8 warnings in 31.21s ===================\n##[error]Process completed with exit code 1.\n"}], "diff": "diff --git a/tests/test_python.py b/tests/test_python.py\nindex 027065c..fe049e8 100644\n--- a/tests/test_python.py\n+++ b/tests/test_python.py\n@@ -57,8 +57,8 @@ def test_deepocsort_output():\n device='cpu',\n half=False,\n per_class=False,\n- asso_func=\"centroid\"\n )\n+ tracker.asso_func=\"centroid\"\n rgb = np.random.randint(255, size=(640, 640, 3), dtype=np.uint8)\n det = np.array([[144, 212, 578, 480, 0.82, 0],\n [425, 281, 576, 472, 0.56, 65]])\n", "difficulty": 2, "changed_files": ["tests/test_python.py"], "commit_link": "https://github.com/mikel-brostrom/yolo_tracking/tree/b4cd344b4fd4316112beb324fe04d703fd2b7254"}
data/python/b639adb.json ADDED
@@ -0,0 +1 @@
+ {"language": "Python", "id": 20, "repo_owner": "skypilot-org", "repo_name": "skypilot", "head_branch": "master", "workflow_name": "Pylint", "workflow_filename": "pylint.yml", "workflow_path": ".github/workflows/pylint.yml", "contributor": "jackyk02", "sha_fail": "b639adb71066410b3b12d97a674ee7fcb51e9980", "sha_success": "a1934063075cb08f0a0041a73026d8ea803617b3", "workflow": "name: Pylint\n\non:\n # Trigger the workflow on push or pull request,\n # but only for the main branch\n push:\n branches:\n - master\n - 'releases/**'\n pull_request:\n branches:\n - master\n - 'releases/**'\n\njobs:\n pylint:\n runs-on: ubuntu-latest\n strategy:\n matrix:\n python-version: [\"3.8\"]\n steps:\n - uses: actions/checkout@v3\n - name: Set up Python ${{ matrix.python-version }}\n uses: actions/setup-python@v4\n with:\n python-version: ${{ matrix.python-version }}\n - name: Install dependencies\n run: |\n python -m pip install --upgrade pip\n pip install \".[all]\"\n pip install pylint==2.8.2\n pip install pylint-quotes==0.2.3\n - name: Analysing the code with pylint\n run: |\n pylint --load-plugins pylint_quotes sky\n", "logs": [{"step_name": "pylint (3.8)/5_Analysing the code with pylint.txt", "log": "##[group]Run pylint --load-plugins pylint_quotes sky\n\u001b[36;1mpylint --load-plugins pylint_quotes sky\u001b[0m\nshell: /usr/bin/bash -e {0}\nenv:\n pythonLocation: /opt/hostedtoolcache/Python/3.8.18/x64\n PKG_CONFIG_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib/pkgconfig\n Python_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python2_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n Python3_ROOT_DIR: /opt/hostedtoolcache/Python/3.8.18/x64\n LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.18/x64/lib\n##[endgroup]\n************* Module sky.resources\nsky/resources.py:839:0: C0301: Line too long (81/80) (line-too-long)\n\n------------------------------------\nYour code has been rated at 10.00/10\n\n##[error]Process completed with exit code 16.\n"}], "diff": "diff --git a/sky/resources.py b/sky/resources.py\nindex 2f7aebce..3056e104 100644\n--- a/sky/resources.py\n+++ b/sky/resources.py\n@@ -836,11 +836,11 @@ class Resources:\n image_size = self.cloud.get_image_size(image_id, region)\n if image_size >= self.disk_size:\n with ux_utils.print_exception_no_traceback():\n- size_compare = 'larger than' if image_size > self.disk_size \\\n+ size_comp = 'larger than' if image_size > self.disk_size \\\n else 'equal to'\n raise ValueError(\n f'Image {image_id!r} is {image_size}GB, which is '\n- f'{size_compare} the specified disk_size: '\n+ f'{size_comp} the specified disk_size: '\n f'{self.disk_size} GB. Please specify a larger '\n 'disk_size to use this image.')\n \n", "difficulty": 0, "changed_files": ["sky/resources.py"], "commit_link": "https://github.com/skypilot-org/skypilot/tree/b639adb71066410b3b12d97a674ee7fcb51e9980"}