[TensorFlowJS First Encounter] Hands-On 3: Fitting a Curve with TensorFlow.js
Problem Description
Fit the curve y = x*x - 2x + 3 + 0.1 * (a random value in [-1, 1]), with x given in the range (0, 3).
Problem Analysis
In the straight-line fitting post, we used the simplest model, y = wx + b, and successfully fit a line. Now we go a step further and fit a curve. The simple y = wx + b model is no longer enough; we need more neurons (parameters) to solve the problem, as sketched below.
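A minimal sketch of that change (assuming the tf global provided by the TensorFlow.js script tag; the full program is in the Code section below): instead of a single weight and bias, we learn three coefficients of y = a*x*x + b*x + c.

// Sketch only: quadratic model replacing the linear y = w*x + b.
// a, b, c are trainable variables updated by the optimizer during training.
const a = tf.variable(tf.scalar(Math.random()));
const b = tf.variable(tf.scalar(Math.random()));
const c = tf.variable(tf.scalar(Math.random()));
function predict(x) {
  // y = a*x^2 + b*x + c, computed element-wise on the input tensor.
  return tf.tidy(() => a.mul(x.square()).add(b.mul(x)).add(c));
}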
Code
<html>
<head>
  <script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs"></script>
</head>
<body>
  <button class="btn btn-primary" onclick="fnRun0();">Start 0</button>
  <div id="p0Id">out0</div>
  <button class="btn btn-primary" onclick="fnRun1();">Start 1</button>
  <div id="p1Id">out1</div>
  <button class="btn btn-primary" onclick="fnRun2();">Start 2</button>
  <div id="p2Id">out2</div>
  <script>
    // Labels: y = x*x - 2x + 3 plus a small random perturbation.
    // (Note: the noise amplitude here is 0.001, smaller than the 0.1 in the problem statement.)
    function get_ys(xs) {
      var ys = new Array();
      for (var i = 0; i < xs.length; i++) {
        ys[i] = xs[i] * xs[i] - 2 * xs[i] + 3 + 0.001 * (2 * Math.random() - 1);
      }
      return ys;
    }

    // 200 sample points: x = 0, 0.01, ..., 1.99.
    var xs = new Array();
    for (var i = 0; i < 200; i++) { xs[i] = 0.01 * i; }
    var ys = get_ys(xs);

    const xst = tf.tensor(xs, [xs.length, 1]);
    const yst = tf.tensor(ys, [ys.length, 1]);

    // Trainable coefficients of the model y = a*x^2 + b*x + c.
    const a = tf.variable(tf.scalar(Math.random()));
    const b = tf.variable(tf.scalar(Math.random()));
    const c = tf.variable(tf.scalar(Math.random()));

    function predict(x) {
      return tf.tidy(() => a.mul(x.square()).add(b.mul(x)).add(c));
    }

    // Mean squared error.
    function loss(prediction, labels) {
      return prediction.sub(labels).square().mean();
    }

    const numIterations = 1000;
    const learningRate = 0.12;
    const optimizer = tf.train.sgd(learningRate);

    function train(xst, yst, numIterations) {
      for (var iter = 0; iter < numIterations + 1; iter++) {
        optimizer.minimize(() => {
          const predsYs = predict(xst);
          if (iter % 100 == 0) {
            console.log(iter + " steps loss is " + loss(predsYs, yst));
          }
          return loss(predsYs, yst);
        });
      }
      // After training, predict a few test points.
      const test_xs = tf.tensor([0.5, 1, 1.5], [3, 1]);
      predict(test_xs).print();
    }

    train(xst, yst, numIterations);
  </script>
</body>
</html>
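Note that the three buttons on the page call fnRun0, fnRun1 and fnRun2, which the listing above never defines; training simply starts when the script runs. A hypothetical handler for the first button (a sketch using the standard DOM API and Tensor.dataSync(), not part of the original code) could write the predictions into the p0Id div:

// Hypothetical: run the trained model on a few points and show the result on the page.
function fnRun0() {
  const test_xs = tf.tensor([0.5, 1, 1.5], [3, 1]);
  const values = predict(test_xs).dataSync();  // copy the values out of the tensor
  document.getElementById("p0Id").innerText = Array.from(values).join(", ");
}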
Output
After 1000 training iterations, we feed in [0.5, 1, 1.5] for prediction and get
[[2.2503195],
 [2.0105994],
 [2.2543631]]
The noise-free targets y = x*x - 2x + 3 at these points are 2.25, 2, and 2.25, so the curve is fit quite well.
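As a quick sanity check (a sketch, not part of the original post), we can compare the printed predictions against the noise-free values of y = x*x - 2x + 3 at the test points:

// Ground truth at x = 0.5, 1, 1.5 is 2.25, 2, 2.25; the absolute errors are about 0.01 or less.
const testXs = [0.5, 1, 1.5];
const preds = [2.2503195, 2.0105994, 2.2543631];
testXs.forEach((x, i) => {
  const truth = x * x - 2 * x + 3;
  console.log("x=" + x + " truth=" + truth + " pred=" + preds[i] +
              " |err|=" + Math.abs(truth - preds[i]).toFixed(4));
});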
Training log:
0 steps loss is Tensor 1.7456094026565552
100 steps loss is Tensor 0.08455191552639008
200 steps loss is Tensor 0.040247201919555664
300 steps loss is Tensor 0.0191580131649971
400 steps loss is Tensor 0.009119458496570587
500 steps loss is Tensor 0.004341088235378265
600 steps loss is Tensor 0.0020665652118623257
700 steps loss is Tensor 0.0009838765254244208
800 steps loss is Tensor 0.0004685141902882606
900 steps loss is Tensor 0.00022319876006804407
1000 steps loss is Tensor 0.00010642936103977263
Tensor
    [[2.2503195],
     [2.0105994],
     [2.2543631]]