Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file added Chapter01/RPL/714222073/GreatLearning.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
27 changes: 27 additions & 0 deletions Chapter01/RPL/714222073/go.mod
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
module github.com/dinilubis/ai/Chapter01/RPL/714222073

go 1.20

require gorgonia.org/gorgonia v0.9.17

require (
github.com/apache/arrow/go/arrow v0.0.0-20210105145422-88aaea5262db // indirect
github.com/awalterschulze/gographviz v0.0.0-20190221210632-1e9ccb565bca // indirect
github.com/chewxy/hm v1.0.0 // indirect
github.com/chewxy/math32 v1.0.7-0.20210223031236-a3549c8cb6a9 // indirect
github.com/gogo/protobuf v1.3.1 // indirect
github.com/golang/protobuf v1.4.3 // indirect
github.com/google/flatbuffers v1.12.0 // indirect
github.com/leesper/go_rng v0.0.0-20171009123644-5344a9259b21 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/xtgo/set v1.0.0 // indirect
go4.org/unsafe/assume-no-moving-gc v0.0.0-20201222180813-1025295fd063 // indirect
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
gonum.org/v1/gonum v0.8.2 // indirect
google.golang.org/protobuf v1.25.0 // indirect
gorgonia.org/cu v0.9.3 // indirect
gorgonia.org/dawson v1.2.0 // indirect
gorgonia.org/tensor v0.9.17 // indirect
gorgonia.org/vecf32 v0.9.0 // indirect
gorgonia.org/vecf64 v0.9.0 // indirect
)
213 changes: 213 additions & 0 deletions Chapter01/RPL/714222073/go.sum

Large diffs are not rendered by default.

49 changes: 49 additions & 0 deletions Chapter01/RPL/714222073/persamaanelips.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
package main

import (
"fmt"

G "gorgonia.org/gorgonia"
)

// main evaluates the ellipse residual 1 - ((x/a)^2 + (y/b)^2) at a fixed
// point using a gorgonia computation graph. A zero result means (x, y)
// lies exactly on the ellipse with semi-axes a and b.
func main() {
	g := G.NewGraph()

	// Constants a and b: the ellipse semi-axes.
	a := G.NewScalar(g, G.Float64, G.WithName("a"))
	b := G.NewScalar(g, G.Float64, G.WithName("b"))

	// Variables x and y: the point to evaluate.
	x := G.NewScalar(g, G.Float64, G.WithName("x"))
	y := G.NewScalar(g, G.Float64, G.WithName("y"))

	// Build the residual 1 - ((x/a)^2 + (y/b)^2).
	// (The original comment claimed "(x/a)^2 + (y/b)^2 - 1", which has the
	// opposite sign of what the code computes.)
	two := G.NewConstant(2.0)
	xSq := G.Must(G.Pow(x, two))
	ySq := G.Must(G.Pow(y, two))
	aSq := G.Must(G.Pow(a, two))
	bSq := G.Must(G.Pow(b, two))
	sum := G.Must(G.Add(G.Must(G.Div(xSq, aSq)), G.Must(G.Div(ySq, bSq))))
	residual := G.Must(G.Sub(G.NewConstant(1.0), sum))

	machine := G.NewTapeMachine(g)
	defer machine.Close()

	// Bind the constant values a=2, b=4.
	G.Let(a, 2.0)
	G.Let(b, 4.0)

	// Bind the point (x, y) = (1, 3).
	G.Let(x, 1.0)
	G.Let(y, 3.0)

	// Run the graph; return instead of falling through to a nil Value()
	// (the original printed the error and then dereferenced the result anyway).
	if err := machine.RunAll(); err != nil {
		fmt.Print(err)
		return
	}

	// cetak hasil
	fmt.Printf("Hasilnya : %v", residual.Value().Data())
}
7 changes: 7 additions & 0 deletions Chapter01/RPL/714222073/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
Tugas Chapter01

Membuat kode program persamaan elips: (x/a)^2 + (y/b)^2 = 1

Screenshot hasil running:

![hasil running](https://github.com/DiniLubis/ai/assets/119611222/ee38814a-543a-4e46-9ace-928654c5f09f)
29 changes: 29 additions & 0 deletions Chapter02/RPL/714222073/go.mod
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
module 714222073

go 1.20

require (
gorgonia.org/gorgonia v0.9.17
gorgonia.org/tensor v0.9.24
)

require (
github.com/apache/arrow/go/arrow v0.0.0-20210105145422-88aaea5262db // indirect
github.com/awalterschulze/gographviz v0.0.0-20190221210632-1e9ccb565bca // indirect
github.com/chewxy/hm v1.0.0 // indirect
github.com/chewxy/math32 v1.0.8 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/protobuf v1.4.3 // indirect
github.com/google/flatbuffers v1.12.0 // indirect
github.com/leesper/go_rng v0.0.0-20171009123644-5344a9259b21 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/xtgo/set v1.0.0 // indirect
go4.org/unsafe/assume-no-moving-gc v0.0.0-20220617031537-928513b29760 // indirect
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
gonum.org/v1/gonum v0.8.2 // indirect
google.golang.org/protobuf v1.25.0 // indirect
gorgonia.org/cu v0.9.3 // indirect
gorgonia.org/dawson v1.2.0 // indirect
gorgonia.org/vecf32 v0.9.0 // indirect
gorgonia.org/vecf64 v0.9.0 // indirect
)
234 changes: 234 additions & 0 deletions Chapter02/RPL/714222073/go.sum

Large diffs are not rendered by default.

207 changes: 207 additions & 0 deletions Chapter02/RPL/714222073/main.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,207 @@
package main

import (
"fmt"
"log"
"math/rand"

. "gorgonia.org/gorgonia"
"gorgonia.org/tensor"
)

var err error

// nn holds the expression graph and the nine weight matrices of the network,
// plus the prediction node and its captured value after a run.
type nn struct {
	g *ExprGraph // graph all nodes below belong to
	// Layer weights; shapes alternate (3,1) and (1,3) as built in newNN.
	w0, w1, w2, w3, w4, w5, w6, w7, w8 *Node

	pred    *Node // output node of the forward pass (set by fwd)
	predVal Value // value of pred, captured via Read on every run
}

// newNN builds a network with nine randomly initialised weight matrices.
// Even-indexed weights are shaped (3, 1) and odd-indexed (1, 3), matching
// the original hand-written layout, and the random backings are drawn in
// the same order (w0..w8) so the RNG sequence is unchanged.
//
// Fix: the original gave every node the same name "w"; each node now gets
// a distinct name "w0".."w8" so nodes are distinguishable in the graph.
func newNN(g *ExprGraph) *nn {
	// newWeight draws a 3-element random backing and wraps it in a named
	// matrix node of the requested shape.
	newWeight := func(idx, rows, cols int) *Node {
		backing := tensor.Random(tensor.Float64, rows*cols)
		t := tensor.New(tensor.WithBacking(backing), tensor.WithShape(rows, cols))
		return NewMatrix(g,
			tensor.Float64,
			WithName(fmt.Sprintf("w%d", idx)),
			WithShape(rows, cols),
			WithValue(t),
		)
	}

	ws := make([]*Node, 9)
	for i := range ws {
		if i%2 == 0 {
			ws[i] = newWeight(i, 3, 1)
		} else {
			ws[i] = newWeight(i, 1, 3)
		}
	}

	return &nn{
		g:  g,
		w0: ws[0],
		w1: ws[1],
		w2: ws[2],
		w3: ws[3],
		w4: ws[4],
		w5: ws[5],
		w6: ws[6],
		w7: ws[7],
		w8: ws[8],
	}
}

// learnables returns the nodes the solver is allowed to update.
// Fix: the original returned only m.w8, which silently froze the first
// eight layers; all nine weight matrices are trainable here.
func (m *nn) learnables() Nodes {
	return Nodes{m.w0, m.w1, m.w2, m.w3, m.w4, m.w5, m.w6, m.w7, m.w8}
}

// fwd wires the forward pass: nine weighted layers, each followed by a
// different activation, ending in the prediction node m.pred whose value
// is captured into m.predVal on every run. It always returns nil; any
// graph-construction failure panics via Must.
func (m *nn) fwd(x *Node) (err error) {
	tanhOut := Must(Tanh(Must(Mul(x, m.w0))))
	reluOut := Must(LeakyRelu(Must(Mul(tanhOut, m.w1)), 0.1))
	logOut := Must(Log1p(Must(Mul(reluOut, m.w2))))
	sinOut := Must(Sin(Must(Mul(logOut, m.w3))))
	cosOut := Must(Cos(Must(Mul(sinOut, m.w4))))
	expOut := Must(Exp(Must(Mul(cosOut, m.w5))))
	cubeOut := Must(Cube(Must(Mul(expOut, m.w6))))
	sigOut := Must(Sigmoid(Must(Mul(cubeOut, m.w7))))

	m.pred = Must(Mul(sigOut, m.w8))
	Read(m.pred, &m.predVal)
	return nil
}

// main trains the toy network on a 4x3 binary input against a 4x1 target
// using MSE loss and vanilla SGD for 1000 steps, printing the prediction
// value captured after the final run.
func main() {
	rand.Seed(31117)

	// Create graph and network.
	g := NewGraph()
	m := newNN(g)

	// Set input x to network: four 3-feature rows.
	xB := []float64{0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1}
	xT := tensor.New(tensor.WithBacking(xB), tensor.WithShape(4, 3))
	x := NewMatrix(g,
		tensor.Float64,
		WithName("X"),
		WithShape(4, 3),
		WithValue(xT),
	)

	// Define validation data set: one target per input row.
	yB := []float64{0, 0, 1, 1}
	yT := tensor.New(tensor.WithBacking(yB), tensor.WithShape(4, 1))
	y := NewMatrix(g,
		tensor.Float64,
		WithName("y"),
		WithShape(4, 1),
		WithValue(yT),
	)

	// Build the forward pass.
	if err := m.fwd(x); err != nil {
		log.Fatalf("%+v", err)
	}

	// Calculate cost with MSE. (This prints the prediction's shape, as the
	// original did; the value is not available before the first run.)
	fmt.Println("Output before Training: \n", m.pred.Shape())
	losses := Must(Sub(y, m.pred))
	square := Must(Square(losses))
	cost := Must(Mean(square))

	// Register gradient computation for all learnables.
	if _, err := Grad(cost, m.learnables()...); err != nil {
		log.Fatal(err)
	}

	// Instantiate VM (with dual values so the solver can read gradients)
	// and the solver. Fix: the VM was never closed in the original.
	vm := NewTapeMachine(g, BindDualValues(m.learnables()...))
	defer vm.Close()
	solver := NewVanillaSolver(WithLearnRate(1.0))

	for i := 0; i < 1000; i++ {
		// One Reset per iteration is enough; the original reset twice.
		vm.Reset()
		fmt.Printf("\nStep %d\n", i)
		if err := vm.RunAll(); err != nil {
			log.Fatalf("Failed at iter %d: %v", i, err)
		}
		// Fix: the original ignored the solver's error return.
		if err := solver.Step(NodesToValueGrads(m.learnables())); err != nil {
			log.Fatal(err)
		}
	}
	fmt.Println("\n\nOutput after Training: \n", m.predVal)
}
Loading