95 changes: 95 additions & 0 deletions darknet/cfg/alexnet.cfg
@@ -0,0 +1,95 @@
[net]
batch=128
subdivisions=1
height=227
width=227
channels=3
momentum=0.9
decay=0.0005
max_crop=256

learning_rate=0.01
policy=poly
power=4
max_batches=800000

angle=7
hue = .1
saturation=.75
exposure=.75
aspect=.75

[convolutional]
filters=96
size=11
stride=4
pad=0
activation=relu

[maxpool]
size=3
stride=2
padding=0

[convolutional]
filters=256
size=5
stride=1
pad=1
activation=relu

[maxpool]
size=3
stride=2
padding=0

[convolutional]
filters=384
size=3
stride=1
pad=1
activation=relu

[convolutional]
filters=384
size=3
stride=1
pad=1
activation=relu

[convolutional]
filters=256
size=3
stride=1
pad=1
activation=relu

[maxpool]
size=3
stride=2
padding=0

[connected]
output=4096
activation=relu

[dropout]
probability=.5

[connected]
output=4096
activation=relu

[dropout]
probability=.5

[connected]
output=1000
activation=linear

[softmax]
groups=1

[cost]
type=sse
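
A note on the schedule in the [net] section above: with policy=poly, darknet decays the base learning rate toward zero over max_batches. A minimal sketch of that decay, assuming the usual poly rule lr(i) = learning_rate * (1 - i/max_batches)^power:

# Poly learning-rate decay as configured in alexnet.cfg above.
# Assumes darknet's poly rule: lr(i) = base_lr * (1 - i/max_batches)**power.
def poly_lr(batch_index, base_lr=0.01, power=4, max_batches=800_000):
    return base_lr * (1.0 - batch_index / max_batches) ** power

print(poly_lr(0))        # 0.01 at the first batch
print(poly_lr(400_000))  # 0.000625 halfway through (0.01 * 0.5**4)
print(poly_lr(800_000))  # 0.0 at the final batch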

124 changes: 124 additions & 0 deletions darknet/cfg/cifar.cfg
@@ -0,0 +1,124 @@
[net]
batch=128
subdivisions=1
height=28
width=28
channels=3
max_crop=32
min_crop=32

hue=.1
saturation=.75
exposure=.75

learning_rate=0.4
policy=poly
power=4
max_batches = 5000
momentum=0.9
decay=0.0005


[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[maxpool]
size=2
stride=2

[dropout]
probability=.5

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[maxpool]
size=2
stride=2

[dropout]
probability=.5

[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky

[dropout]
probability=.5

[convolutional]
filters=10
size=1
stride=1
pad=1
activation=leaky

[avgpool]

[softmax]
groups=1

[cost]
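
The network above is all-convolutional: the final 1x1 [convolutional] layer maps the features to 10 class channels, the parameter-free [avgpool] averages each channel over the whole map, and [softmax] normalizes the result. A minimal sketch of that head, assuming a CxHxW feature map and ignoring the leaky activation:

import numpy as np

def head(features, weights, bias):
    # 1x1 convolution = a per-pixel linear map from c_in channels to 10.
    logits_map = np.einsum('oc,chw->ohw', weights, features) + bias[:, None, None]
    # [avgpool] with no parameters is a global average over H and W.
    logits = logits_map.mean(axis=(1, 2))
    # [softmax] turns the 10 pooled logits into class probabilities.
    e = np.exp(logits - logits.max())
    return e / e.sum()

probs = head(np.random.rand(512, 7, 7), 0.01 * np.random.rand(10, 512), np.zeros(10))
print(probs.shape, probs.sum())  # (10,) ~1.0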

119 changes: 119 additions & 0 deletions darknet/cfg/cifar.test.cfg
@@ -0,0 +1,119 @@
[net]
batch=128
subdivisions=1
height=32
width=32
channels=3
momentum=0.9
decay=0.0005

learning_rate=0.4
policy=poly
power=4
max_batches = 50000


[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky

[maxpool]
size=2
stride=2

[dropout]
probability=.5

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky

[maxpool]
size=2
stride=2

[dropout]
probability=.5

[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky

[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky

[dropout]
probability=.5

[convolutional]
filters=10
size=1
stride=1
pad=1
activation=leaky

[avgpool]

[softmax]
groups=1
temperature=3

[cost]
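
This test-time variant of cifar.cfg takes the full 32x32 image, drops the crop and color jitter, and sets temperature=3 on the softmax. Assuming darknet applies the temperature in the usual way, logits are divided by T before exponentiation, which softens the output distribution; a sketch:

import numpy as np

def softmax(logits, temperature=1.0):
    # Dividing by the temperature before exp flattens the distribution;
    # temperature=1 recovers the standard softmax.
    z = logits / temperature
    e = np.exp(z - z.max())
    return e / e.sum()

logits = np.array([4.0, 2.0, 0.5])
print(softmax(logits))       # sharp:    ~[0.86, 0.12, 0.03]
print(softmax(logits, 3.0))  # softened: ~[0.55, 0.28, 0.17]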

8 changes: 8 additions & 0 deletions darknet/cfg/coco.data
@@ -0,0 +1,8 @@
classes= 80
train = /home/pjreddie/data/coco/trainvalno5k.txt
#valid = coco_testdev
valid = data/coco_val_5k.list
names = data/coco.names
backup = /home/pjreddie/backup/
eval=coco
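
The .data files are plain key=value manifests read once at startup: the class count, image-list paths for training and validation, the class-name file, and the directory where weight checkpoints are written. A reader sketch (an illustrative stand-in, not darknet's own parser), assuming '#' starts a comment as in the file above:

def read_data_cfg(path):
    # Parse "key = value" lines, skipping blanks and '#' comments.
    options = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            key, _, value = line.partition('=')
            options[key.strip()] = value.strip()
    return options

opts = read_data_cfg('darknet/cfg/coco.data')
print(opts['classes'], opts['names'])  # 80 data/coco.names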

10 changes: 10 additions & 0 deletions darknet/cfg/combine9k.data
@@ -0,0 +1,10 @@
classes= 9418
#train = /home/pjreddie/data/coco/trainvalno5k.txt
train = data/combine9k.train.list
valid = /home/pjreddie/data/imagenet/det.val.files
labels = data/9k.labels
names = data/9k.names
backup = backup/
map = data/inet9k.map
eval = imagenet
results = results
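
combine9k.data is the YOLO9000-style manifest: 9,418 classes arranged in a WordTree (data/9k.labels and data/9k.names), with inet9k.map presumably relating a smaller ImageNet label set to that tree. In the WordTree scheme each node predicts P(node | parent), and an absolute class score is the product of conditionals along the path to the root; a sketch over a made-up three-node fragment (not data/9k.tree):

# Hypothetical fragment; real trees come from the 9k label files above.
conditional = {'animal': 0.9, 'dog': 0.6, 'terrier': 0.5}
parent = {'terrier': 'dog', 'dog': 'animal', 'animal': None}

def path_probability(node):
    p = 1.0
    while node is not None:
        p *= conditional[node]
        node = parent[node]
    return p

print(path_probability('terrier'))  # 0.9 * 0.6 * 0.5 = 0.27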