Modalities: Tabular, Text
Formats: json
Size: < 1K
Libraries: Datasets, Dask
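
Since the Datasets library is listed above, a record like the JSON object below can be read with its generic JSON loader. This is a minimal sketch, assuming the data ships as a JSON-Lines file named `data.jsonl` (a hypothetical name; substitute the actual file in this repository):

```python
from datasets import load_dataset

# "data.jsonl" is a placeholder -- point this at the actual JSONL file.
ds = load_dataset("json", data_files="data.jsonl", split="train")

record = ds[0]
# Each record pairs a failing commit with the commit that made CI pass again.
print(record["repo_owner"], record["repo_name"])        # e.g. keras-team keras
print(record["sha_fail"], "->", record["sha_success"])
print(record["workflow_name"])                          # e.g. Tests
```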
{"head_branch": "add-hardswish-to-nn", "contributor": "james77777778", "sha_fail": "17a3c1dd96375907ff0d0458977600577df3b4c6", "sha_success": "9d86dee1022aecc12faaccf57f2acd405164db1f", "language": "Python", "repo_owner": "keras-team", "repo_name": "keras", "workflow_name": "Tests", "workflow_filename": "actions.yml", "workflow_path": ".github/workflows/actions.yml", "workflow": "name: Tests\n\non:\n push:\n branches: [ master ]\n pull_request:\n release:\n types: [created]\n\npermissions:\n contents: read\n\njobs:\n build:\n strategy:\n fail-fast: false\n matrix:\n python-version: [3.9]\n backend: [tensorflow, jax, torch, numpy]\n name: Run tests\n runs-on: ubuntu-latest\n env:\n PYTHON: ${{ matrix.python-version }}\n KERAS_HOME: .github/workflows/config/${{ matrix.backend }}\n steps:\n - uses: actions/checkout@v4\n - name: Check for changes in keras/applications\n uses: dorny/paths-filter@v2\n id: filter\n with:\n filters: |\n applications:\n - 'keras/applications/**'\n - name: Set up Python\n uses: actions/setup-python@v4\n with:\n python-version: ${{ matrix.python-version }}\n - name: Get pip cache dir\n id: pip-cache\n run: |\n python -m pip install --upgrade pip setuptools\n echo \"dir=$(pip cache dir)\" >> $GITHUB_OUTPUT\n - name: pip cache\n uses: actions/cache@v3\n with:\n path: ${{ steps.pip-cache.outputs.dir }}\n key: ${{ runner.os }}-pip-${{ hashFiles('setup.py') }}-${{ hashFiles('requirements.txt') }}\n - name: Install dependencies\n run: |\n pip install -r requirements.txt --progress-bar off --upgrade\n pip uninstall -y keras keras-nightly\n pip install -e \".\" --progress-bar off --upgrade\n - name: Test applications with pytest\n if: ${{ steps.filter.outputs.applications == 'true' }}\n run: |\n pytest keras/applications --cov=keras/applications\n coverage xml --include='keras/applications/*' -o apps-coverage.xml\n - name: Codecov keras.applications\n if: ${{ steps.filter.outputs.applications == 'true' }}\n uses: codecov/codecov-action@v3\n with:\n env_vars: PYTHON,KERAS_HOME\n flags: keras.applications,keras.applications-${{ matrix.backend }}\n files: apps-coverage.xml\n fail_ci_if_error: false\n - name: Test integrations\n if: ${{ matrix.backend != 'numpy'}}\n run: |\n python integration_tests/import_test.py\n - name: Test TF-specific integrations\n if: ${{ matrix.backend == 'tensorflow'}}\n run: |\n python integration_tests/tf_distribute_training_test.py\n - name: Test Torch-specific integrations\n if: ${{ matrix.backend == 'torch'}}\n run: |\n pytest integration_tests/torch_workflow_test.py\n - name: Test with pytest\n run: |\n pytest keras --ignore keras/applications --cov=keras\n coverage xml --omit='keras/applications/*' -o core-coverage.xml\n - name: Codecov keras\n uses: codecov/codecov-action@v3\n with:\n env_vars: PYTHON,KERAS_HOME\n flags: keras,keras-${{ matrix.backend }}\n files: core-coverage.xml\n fail_ci_if_error: false\n\n format:\n name: Check the code format\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v4\n - name: Set up Python 3.9\n uses: actions/setup-python@v4\n with:\n python-version: '3.9'\n - name: Get pip cache dir\n id: pip-cache\n run: |\n python -m pip install --upgrade pip setuptools\n echo \"dir=$(pip cache dir)\" >> $GITHUB_OUTPUT\n - name: pip cache\n uses: actions/cache@v3\n with:\n path: ${{ steps.pip-cache.outputs.dir }}\n key: ${{ runner.os }}-pip-${{ hashFiles('setup.py') }}-${{ hashFiles('requirements.txt') }}\n - name: Install dependencies\n run: |\n pip install -r requirements.txt --progress-bar off --upgrade\n pip 
uninstall -y keras keras-nightly\n pip install -e \".\" --progress-bar off --upgrade\n - name: Lint\n run: bash shell/lint.sh\n", "logs": "keras/trainers/__init__.py 0 0 0 0 100%\nkeras/trainers/compile_utils.py 367 55 289 41 81%\nkeras/trainers/data_adapters/__init__.py 58 17 44 9 65%\nkeras/trainers/data_adapters/array_data_adapter.py 141 15 58 9 88%\nkeras/trainers/data_adapters/data_adapter.py 13 0 0 0 100%\nkeras/trainers/data_adapters/data_adapter_utils.py 92 19 61 13 74%\nkeras/trainers/data_adapters/generator_data_adapter.py 63 16 16 4 70%\nkeras/trainers/data_adapters/py_dataset_adapter.py 254 49 82 17 76%\nkeras/trainers/data_adapters/tf_dataset_adapter.py 57 1 24 1 98%\nkeras/trainers/data_adapters/torch_data_loader_adapter.py 46 30 10 0 29%\nkeras/trainers/epoch_iterator.py 54 3 28 7 88%\nkeras/trainers/trainer.py 194 25 92 15 83%\nkeras/utils/__init__.py 24 0 0 0 100%\nkeras/utils/argument_validation.py 43 10 26 5 78%\nkeras/utils/audio_dataset_utils.py 87 16 52 4 83%\nkeras/utils/backend_utils.py 37 10 12 2 63%\nkeras/utils/code_stats.py 40 3 34 2 91%\nkeras/utils/dataset_utils.py 295 64 216 47 74%\nkeras/utils/dtype_utils.py 25 0 16 0 100%\nkeras/utils/file_utils.py 221 52 129 23 71%\nkeras/utils/image_dataset_utils.py 97 9 54 9 87%\nkeras/utils/image_utils.py 150 73 80 13 45%\nkeras/utils/io_utils.py 34 0 10 0 100%\nkeras/utils/jax_utils.py 7 3 4 1 45%\nkeras/utils/model_visualization.py 193 169 90 0 8%\nkeras/utils/module_utils.py 32 1 6 1 95%\nkeras/utils/naming.py 34 1 8 1 95%\nkeras/utils/nest.py 42 9 18 3 77%\nkeras/utils/numerical_utils.py 58 4 26 5 89%\nkeras/utils/progbar.py 133 24 60 8 79%\nkeras/utils/python_utils.py 67 5 32 5 90%\nkeras/utils/rng_utils.py 16 3 6 3 73%\nkeras/utils/sequence_utils.py 41 8 24 6 78%\nkeras/utils/shape_utils.py 15 1 19 1 94%\nkeras/utils/summary_utils.py 212 39 116 17 77%\nkeras/utils/text_dataset_utils.py 68 4 40 5 90%\nkeras/utils/tf_utils.py 68 24 38 6 62%\nkeras/utils/timeseries_dataset_utils.py 62 4 48 5 92%\nkeras/utils/torch_utils.py 56 38 16 0 28%\nkeras/utils/traceback_utils.py 107 63 48 4 35%\nkeras/utils/tracking.py 114 13 62 7 81%\nkeras/version.py 5 1 0 0 80%\n----------------------------------------------------------------------------------------------\nTOTAL 37070 13646 14503 1409 61%\n\n=========================== short test summary info ============================\nFAILED keras/ops/nn_test.py::NNOpsDtypeTest::test_hard_sigmoid_bfloat16 - numpy.exceptions.DTypePromotionError: The DTypes <class 'numpy.dtype[bfloat16]'> and <class 'numpy.dtypes.Float16DType'> do not have a common DType. For example they cannot be stored in a single array unless the dtype is `object`.\nFAILED keras/ops/nn_test.py::NNOpsDtypeTest::test_hard_swish_bfloat16 - numpy.exceptions.DTypePromotionError: The DTypes <class 'numpy.dtype[bfloat16]'> and <class 'numpy.dtypes.Float16DType'> do not have a common DType. 
For example they cannot be stored in a single array unless the dtype is `object`.\n====== 2 failed, 8384 passed, 219 skipped, 1 xpassed in 309.91s (0:05:09) ======\n##[error]Process completed with exit code 1.\n", "diff": "diff --git a/keras/backend/tensorflow/nn.py b/keras/backend/tensorflow/nn.py\nindex 0e979656c7..e74155f6a4 100644\n--- a/keras/backend/tensorflow/nn.py\n+++ b/keras/backend/tensorflow/nn.py\n@@ -9,6 +9,7 @@ from keras.backend.common.backend_utils import (\n )\n from keras.backend.config import epsilon\n from keras.backend.tensorflow.core import cast\n+from keras.backend.tensorflow.core import convert_to_tensor\n \n \n def relu(x):\n@@ -51,7 +52,8 @@ def leaky_relu(x, negative_slope=0.2):\n \n \n def hard_sigmoid(x):\n- x = x / 6.0 + 0.5\n+ x = convert_to_tensor(x)\n+ x = x / tf.constant(6.0, dtype=x.dtype) + tf.constant(0.5, dtype=x.dtype)\n return tf.clip_by_value(x, 0.0, 1.0)\n \n \ndiff --git a/keras/ops/nn_test.py b/keras/ops/nn_test.py\nindex f3f464f816..3a15fca0eb 100644\n--- a/keras/ops/nn_test.py\n+++ b/keras/ops/nn_test.py\n@@ -1,6 +1,7 @@\n import numpy as np\n import pytest\n from absl.testing import parameterized\n+from tensorflow.python.ops.numpy_ops import np_config\n \n from keras import backend\n from keras import layers\n@@ -24,6 +25,9 @@ from keras.ops import nn as knn\n from keras.ops import numpy as knp\n from keras.testing.test_utils import named_product\n \n+# TODO: remove reliance on this (or alternatively, turn it on by default).\n+np_config.enable_numpy_behavior()\n+\n \n class NNOpsDynamicShapeTest(testing.TestCase, parameterized.TestCase):\n def test_relu(self):\n"}
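
The diff in the record above fixes the two bfloat16 test failures by converting the input to a tensor and building the constants in `hard_sigmoid` with the input's dtype instead of relying on Python-float operands. A minimal standalone sketch of that pattern, using `tf.convert_to_tensor` in place of the Keras backend's own `convert_to_tensor` helper:

```python
import tensorflow as tf


def hard_sigmoid(x):
    # Mirrors the patched keras/backend/tensorflow/nn.py: the constants are
    # created with x.dtype, so a low-precision input is not promoted.
    x = tf.convert_to_tensor(x)
    x = x / tf.constant(6.0, dtype=x.dtype) + tf.constant(0.5, dtype=x.dtype)
    return tf.clip_by_value(x, 0.0, 1.0)


x = tf.constant([-4.0, 0.0, 4.0], dtype=tf.bfloat16)
print(hard_sigmoid(x).dtype)  # bfloat16 -- the output keeps the input dtype
```

Since `hard_swish` is conventionally defined as `x * hard_sigmoid(x)`, keeping `hard_sigmoid` in the input dtype also resolves the `test_hard_swish_bfloat16` failure shown in the logs.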