Fix pytests
sunnycase committed Mar 4, 2025
1 parent 94e513f commit d682811
Showing 10 changed files with 59 additions and 27 deletions.
3 changes: 2 additions & 1 deletion setup.py
@@ -222,6 +222,7 @@ def build_cmake(self, ext: Extension):

build_type = 'Debug' if self.debug else 'Release'
build_dir = os.path.join(self.build_temp, build_type)
python_root = os.path.dirname(sys.executable).replace("\\", "/")

if not os.path.exists(self.build_temp):
os.makedirs(self.build_temp)
@@ -237,7 +238,7 @@ def build_cmake(self, ext: Extension):
self.spawn(["conan", "remote", "add", "sunnycase", "https://conan.sunnycase.moe", "--index", "0", "--force"])
self.spawn(["conan", "install", ext.sourcedir, "--build=missing", "-s",
"build_type=" + build_type, f"-pr:a={host_toolchain_path}",
"-o", "&:runtime=False", "-o", "&:python=True", "-o", "&:tests=False", "-o", f"&:python_root={os.path.dirname(sys.executable)}",
"-o", "&:runtime=False", "-o", "&:python=True", "-o", "&:tests=False", "-o", f"&:python_root={python_root}",
"-c", f"tools.cmake.cmake_layout:build_folder={self.build_temp}"])
self.spawn(["cmake", "-B", build_dir, "-S", ext.sourcedir, "--preset", "conan-release"])
self.spawn(["cmake", "--build", build_dir])
16 changes: 13 additions & 3 deletions src/Nncase.Core/CompilerServices.cs
@@ -208,7 +208,7 @@ public interface ICompilerServicesProvider

Expr SimplifyForDimension(Expr value);

long[] GetMaxShape(Shape shape);
bool TryGetMaxShape(Shape shape, [MaybeNullWhen(false)] out long[] maxShape);
}

internal interface ICompilerServicesProviderInternal
@@ -504,7 +504,17 @@ public static void DumpPatternIR(Expr expr, string prefix, string dumpDir) =>

public static Expr SimplifyForDimension(Expr value) => Provider.SimplifyForDimension(value);

public static long[] GetMaxShape(Shape shape) => Provider.GetMaxShape(shape);
public static bool TryGetMaxShape(Shape shape, [MaybeNullWhen(false)] out long[] maxShape) => Provider.TryGetMaxShape(shape, out maxShape);

public static long[] GetMaxShape(Shape shape)
{
if (TryGetMaxShape(shape, out var maxShape))
{
return maxShape;
}

throw new InvalidOperationException("Failed to get max shape.");
}

public static Expr FastSimplifyForDimension(Expr value)
{
@@ -713,5 +723,5 @@ public IEGraph ERewrite(IEGraph graph, IEnumerable<IRewriteRule> rules, RunPassC

public Expr SimplifyForDimension(Expr value) => _simplifyProvider.SimplifyForDimension(value);

public long[] GetMaxShape(Shape shape) => _simplifyProvider.GetMaxShape(shape);
public bool TryGetMaxShape(Shape shape, [MaybeNullWhen(false)] out long[] maxShape) => _simplifyProvider.TryGetMaxShape(shape, out maxShape);
}
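
The net effect in this file: the provider interface now exposes only the non-throwing try-pattern, while CompilerServices keeps a throwing GetMaxShape wrapper for existing callers. A minimal caller sketch (the namespaces and the surrounding helper method are assumptions for illustration, not part of the commit):

using Nncase;
using Nncase.IR;

// Hedged sketch of the two calling styles available after this change.
static long ElementCountUpperBound(Shape shape)
{
    // Preferred: try-pattern; no exception when a dimension's maximum is unknown or too large.
    if (CompilerServices.TryGetMaxShape(shape, out var maxShape))
    {
        long count = 1;
        foreach (var dim in maxShape)
        {
            count *= dim;
        }

        return count;
    }

    // Caller-defined fallback when the max shape cannot be determined.
    return short.MaxValue;
}

// The legacy style remains available but now throws InvalidOperationException on failure:
// long[] max = CompilerServices.GetMaxShape(shape);
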
2 changes: 1 addition & 1 deletion src/Nncase.Core/Evaluator/Metric.cs
@@ -189,7 +189,7 @@ public static UInt128 GetFLOPs(IRType type, long scale = 1)
{
return type switch
{
TensorType t => (UInt128)t.Shape.ProdWithDynamicAsMaxValue(scale),
TensorType t => (UInt128)t.Shape.ProdWithDynamicAsMaxValue(scale: scale),
TupleType t => t.Fields.Sum(f => GetFLOPs(f, scale)),
_ => 0,
};
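
The switch to a named argument matters because ProdWithDynamicAsMaxValue gains a new first parameter in this commit (see Shape.cs below); a positional `scale` would silently bind to `dynamicValue`. A stand-in sketch of the pitfall (the local function below only mimics the new parameter order, it is not the real Shape method):

// Stand-in with the same parameter order as the new Shape.ProdWithDynamicAsMaxValue.
static long ProdWithDynamicAsMaxValue(int dynamicValue = short.MaxValue, long scale = 1)
    => scale * 8 * 16;   // pretend the max shape is [8, 16]

long flopScale = 4;

long wrong = ProdWithDynamicAsMaxValue((int)flopScale);   // 4 binds to dynamicValue; result 128
long right = ProdWithDynamicAsMaxValue(scale: flopScale); // 4 binds to scale; result 512
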
12 changes: 9 additions & 3 deletions src/Nncase.Core/IR/Shape.cs
@@ -442,10 +442,16 @@ public FixedAndDynamicDimension ProdFixedAndDynamic()
return new(fixedValue, dynamicValue);
}

public long ProdWithDynamicAsMaxValue(long scale = 1)
public long ProdWithDynamicAsMaxValue(int dynamicValue = short.MaxValue, long scale = 1)
{
var maxShape = CompilerServices.GetMaxShape(this);
return Enumerable.Range(0, Rank).Aggregate(scale, (acc, x) => acc * maxShape[x]);
if (CompilerServices.TryGetMaxShape(this, out var maxShape))
{
return Enumerable.Range(0, Rank).Aggregate(scale, (acc, x) => acc * maxShape[x]);
}
else
{
return dynamicValue;
}
}

/// <summary>
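
Behavior sketch of the new fallback: when the maximum shape is known, the product (times scale) is returned as before; when TryGetMaxShape fails, the method now returns dynamicValue (short.MaxValue by default) instead of throwing. The helper below is an assumption used for illustration, not code from the commit:

using Nncase.IR;

// Hedged usage sketch; `shape` is obtained elsewhere (e.g. from a TensorType).
static long UpperBound(Shape shape)
{
    // Fixed shape [2, 3, 4] with scale 2        -> 2 * 3 * 4 * 2 = 48
    // Dynamic shape whose max cannot be derived -> short.MaxValue (32767), no exception
    // Same dynamic shape with an explicit bound -> shape.ProdWithDynamicAsMaxValue(1024) == 1024
    return shape.ProdWithDynamicAsMaxValue(scale: 2);
}
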
3 changes: 2 additions & 1 deletion src/Nncase.Core/Passes/IRewriteProvider.cs
@@ -3,6 +3,7 @@

using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
@@ -54,5 +55,5 @@ public interface ISimplifyProvider
{
Expr SimplifyForDimension(Expr expr);

long[] GetMaxShape(Shape shape);
bool TryGetMaxShape(Shape shape, [MaybeNullWhen(false)] out long[] maxShape);
}
13 changes: 8 additions & 5 deletions src/Nncase.Passes/SimplifyProvider.cs
@@ -6,6 +6,7 @@
using System.Collections.Generic;
using System.CommandLine;
using System.CommandLine.Invocation;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
@@ -96,14 +97,15 @@ public Expr SimplifyForDimension(Expr expr)
#endif
}

public long[] GetMaxShape(Shape shape)
public bool TryGetMaxShape(Shape shape, [MaybeNullWhen(false)] out long[] maxShape)
{
if (shape.IsFixed)
{
return shape.ToValueArray();
maxShape = shape.ToValueArray();
return true;
}

var maxShape = new long[shape.Rank];
maxShape = new long[shape.Rank];
if (!shape.Metadata.Range.HasValue)
{
new InferRangeVisitor().Visit(shape);
@@ -114,12 +116,13 @@ public long[] GetMaxShape(Shape shape)
var max = shape.Dimensions[i].Metadata.Range!.Value.Max;
if (max >= int.MaxValue)
{
throw new ArgumentOutOfRangeException($"shape dimension is too large: {max}");
maxShape = null;
return false;
}

maxShape[i] = (long)max;
}

return maxShape;
return true;
}
}
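
The oversized-dimension case (max >= int.MaxValue) now reports failure instead of throwing ArgumentOutOfRangeException, and [MaybeNullWhen(false)] tells nullable flow analysis that maxShape is only non-null on the true branch. A self-contained sketch of that contract (the stand-in below is not the real provider):

using System;
using System.Diagnostics.CodeAnalysis;
using System.Linq;

// Stand-in mirroring the try-pattern contract of ISimplifyProvider.TryGetMaxShape.
static bool TryGetMaxShapeLike(long[]? knownDims, [MaybeNullWhen(false)] out long[] maxShape)
{
    if (knownDims is not null && knownDims.All(d => d < int.MaxValue))
    {
        maxShape = knownDims;
        return true;
    }

    maxShape = null;    // previously this path threw ArgumentOutOfRangeException
    return false;
}

if (TryGetMaxShapeLike(new long[] { 1, 3, 224, 224 }, out var dims))
{
    // Flow analysis knows `dims` is non-null here because of [MaybeNullWhen(false)].
    Console.WriteLine(string.Join("x", dims));
}
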
15 changes: 13 additions & 2 deletions src/Nncase.Passes/Transforms/InferRangePass.cs
@@ -75,8 +75,19 @@ protected override ValueRange<double> VisitLeafCall(Call expr)

protected override ValueRange<double> VisitLeafTensorConst(TensorConst expr)
{
var value = expr.Value.ToArray<double>();
return new ValueRange<double>(value.Min(), value.Max());
// QuantParam
if (expr.Value.ElementType is QuantParamType)
{
var value = expr.Value.ToArray<QuantParam>();
var min = Math.Min(value.Min(x => x.ZeroPoint), value.Min(x => x.Scale));
var max = Math.Max(value.Max(x => x.ZeroPoint), value.Max(x => x.Scale));
return new ValueRange<double>(min, max);
}
else
{
var value = expr.Value.ToArray<double>();
return new ValueRange<double>(value.Min(), value.Max());
}
}

protected override ValueRange<double> VisitLeafShape(Shape expr)
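
For QuantParamType constants, the range now spans both the zero points and the scales instead of reinterpreting the buffer as doubles. A worked example of the new computation (the tuple stand-in only mimics the ZeroPoint/Scale fields used in the diff; the real Nncase.QuantParam layout is assumed):

using System;
using System.Linq;

var value = new[]
{
    (ZeroPoint: 100, Scale: 0.02),
    (ZeroPoint: -5, Scale: 0.5),
};

// Same min/max folding as the new VisitLeafTensorConst branch.
double min = Math.Min(value.Min(x => (double)x.ZeroPoint), value.Min(x => x.Scale)); // -5
double max = Math.Max(value.Max(x => (double)x.ZeroPoint), value.Max(x => x.Scale)); // 100

Console.WriteLine($"range = [{min}, {max}]");   // range = [-5, 100]
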
5 changes: 2 additions & 3 deletions tests/importer/onnx_/basic/test_clip.py
@@ -112,16 +112,15 @@ def _make_module(in_shape, minimum, maximum, op_version, value_format):

minimums = [
None,
# -1.0,
-1.0,
]

maximums = [
# None,
None,
6.0
]

op_versions_and_value_formats = [
[1, 'attribute'],
[6, 'attribute'],
[11, 'initializer'],
[11, 'constant'],
17 changes: 9 additions & 8 deletions tests/importer/onnx_/basic/test_dequantizelinear.py
@@ -15,6 +15,7 @@

import pytest
import onnx
import numpy as np
from onnx import helper
from onnx import AttributeProto, TensorProto, GraphProto, numpy_helper
from onnx_test_runner import OnnxTestRunner
@@ -34,8 +35,8 @@ def _make_module(in_shape, input_type, scale, zp):
scale = helper.make_tensor(
'scale',
TensorProto.FLOAT,
dims=[len(scale)],
vals=scale
dims=[],
vals=[scale]
)
inputs.append('scale')
initializers.append(scale)
@@ -45,8 +46,8 @@ def _make_module(in_shape, input_type, scale, zp):
zero_point = helper.make_tensor(
'zero_point',
input_type,
dims=[len(zp)],
vals=zp
dims=[],
vals=[zp]
)
inputs.append('zero_point')
initializers.append(zero_point)
@@ -87,13 +88,13 @@ def _make_module(in_shape, input_type, scale, zp):
]

scales = [
[0.02],
0.02,
]

zero_points = [
None,
[100],
[0]
100,
0
]


@@ -103,7 +104,7 @@ def _make_module(in_shape, input_type, scale, zp):
@pytest.mark.parametrize('zero_point', zero_points)
def test_dequantizelinear(in_shape, input_type, scale, zero_point, request):

if input_type == TensorProto.INT8 and zero_point is not None and zero_point[0] != 0:
if input_type == TensorProto.INT8 and zero_point is not None and zero_point != 0:
return

model_def = _make_module(in_shape, input_type, scale, zero_point)
