From 489db419954161724cd04efe97b3432d7196abf0 Mon Sep 17 00:00:00 2001 From: Lutz Roeder Date: Sun, 15 Dec 2019 22:42:47 -1000 Subject: [PATCH] Darknet shape inference (#370) (#71) --- src/darknet.js | 467 ++++++++++++++++++++++++++++++++--------------- test/models.json | 79 +++++++- tools/darknet | 2 +- 3 files changed, 394 insertions(+), 154 deletions(-) diff --git a/src/darknet.js b/src/darknet.js index 1b1503e6cbf..e3524c2d2b4 100644 --- a/src/darknet.js +++ b/src/darknet.js @@ -70,8 +70,8 @@ darknet.Graph = class { let nu = 0; while (lines.length > 0) { nu++; - let line = lines.shift(); - line = line.replace(/\s/g, ''); + const text = lines.shift(); + const line = text.replace(/\s/g, ''); if (line.length > 0) { switch (line[0]) { case '#': @@ -85,10 +85,13 @@ darknet.Graph = class { break; } default: { + if (!section || line[0] < 0x20 || line[0] > 0x7E) { + throw new darknet.Error("Invalid cfg '" + text.replace(/[^\x20-\x7E]+/g, '').trimStart().trimEnd() + "' at line " + nu.toString() + "."); + } if (section) { let property = line.split('='); if (property.length != 2) { - throw new darknet.Error("Invalid cfg '" + line + "' at line " + nu.toString() + "."); + throw new darknet.Error("Invalid cfg '" + text.replace(/[^\x20-\x7E]+/g, '').trimStart().trimEnd() + "' at line " + nu.toString() + "."); } let key = property[0].trim(); let value = property[1].trim(); @@ -100,94 +103,315 @@ darknet.Graph = class { } } - for (let section of sections) { - section.values = {}; - const schema = metadata.getSchema(section.type); - if (schema && schema.attributes) { - for (let attribute of schema.attributes) { - if (attribute.name) { - if (section.options[attribute.name] !== undefined) { - switch (attribute.type) { - case 'int32': - section.values[attribute.name] = parseInt(section.options[attribute.name], 10); - break; - case 'float32': - section.values[attribute.name] = parseFloat(section.options[attribute.name]); - break; - case 'string': - section.values[attribute.name] = 
section.options[attribute.name]; - break; - } - } - else if (attribute.default !== undefined) { - section.values[attribute.name] = attribute.default - } - } - } + const option_find_int = (options, key, defaultValue) => { + const value = options[key]; + return value !== undefined ? parseInt(value, 10) : defaultValue; + }; + + const option_find_str = (options, key, defaultValue) => { + const value = options[key]; + return value !== undefined ? value : defaultValue; + }; + + let params = {}; + + const net = sections.shift(); + switch (net.type) { + case 'net': + case 'network': { + params.h = option_find_int(net.options, 'height', 0); + params.w = option_find_int(net.options, 'width', 0); + params.c = option_find_int(net.options, 'channels', 0); + params.inputs = option_find_int(net.options, 'inputs', params.h * params.w * params.c); + break; } } - if (sections.length === 0) { - throw new darknet.Error('Config file has no sections.'); - } - - let net = sections.shift(); - if (net.type !== 'net' && net.type !== 'network') { - throw new darknet.Error('First section must be [net] or [network].'); - } - - const inputType = new darknet.TensorType('float32', new darknet.TensorShape([ net.values.width, net.values.height, net.values.channels ])); - + const inputType = params.w && params.h && params.c ? 
+ new darknet.TensorType('float32', new darknet.TensorShape([ params.w, params.h, params.c ])) : + new darknet.TensorType('float32', new darknet.TensorShape([ params.inputs ])); const inputName = 'input'; - this._inputs.push(new darknet.Parameter(inputName, true, [ - new darknet.Argument(inputName, inputType, null) - ])); + params.arguments = [ new darknet.Argument(inputName, inputType, null) ]; + this._inputs.push(new darknet.Parameter(inputName, true, params.arguments)); - for (let i = 0; i < sections.length; i++) { - sections[i]._outputs = [ i.toString() ]; + if (sections.length === 0) { + throw new darknet.Error('Config file has no sections.'); } - let inputs = [ inputName ]; + let infer = true; for (let i = 0; i < sections.length; i++) { - const layer = sections[i]; - layer._inputs = inputs; - inputs = [ i.toString() ]; - switch (layer.type) { + let section = sections[i]; + section.layer = {}; + section.tensors = []; + section.inputs = []; + section.outputs = []; + const options = section.options; + let layer = section.layer; + section.inputs = section.inputs.concat(params.arguments); + section.outputs.push(new darknet.Argument(i.toString(), null, null)); + switch (section.type) { case 'shortcut': case 'sam': case 'scale_channels': { - let from = Number.parseInt(layer.options.from, 10); - from = (from >= 0) ? 
from : (i + from); - const shortcut = sections[from]; - if (shortcut) { - layer._inputs.push(shortcut._outputs[0]); + let index = option_find_int(options, 'from', 0); + if (index < 0) { + index = i + index; } + const from = sections[index]; + if (from) { + section.inputs.push(from.outputs[0]); + section.from = from; + } + delete options.from; break; } case 'route': { - layer._inputs = []; - const routes = layer.options.layers.split(',').map((route) => Number.parseInt(route.trim(), 10)); + section.inputs = []; + section.input_sections = []; + const routes = options.layers.split(',').map((route) => Number.parseInt(route.trim(), 10)); for (let j = 0; j < routes.length; j++) { const index = (routes[j] < 0) ? i + routes[j] : routes[j]; const route = sections[index]; if (route) { - layer._inputs.push(route._outputs[0]); + section.inputs.push(route.outputs[0]); + section.input_sections.push(route); } } + delete options.layers; break; } } + if (infer) { + switch (section.type) { + case 'convolutional': + case 'deconvolutional': { + const w = params.w; + const h = params.h; + const c = params.c; + const size = option_find_int(options, 'size', 1); + const n = option_find_int(options, 'filters', 1); + const pad = option_find_int(options, 'pad', 0); + const padding = pad ? 
(size >> 1) : option_find_int(options, 'padding', 0); + const stride = option_find_int(options, 'stride', 1); + const groups = option_find_int(options, 'groups', 1); + layer.out_w = Math.floor((w + 2 * padding - size) / stride) + 1; + layer.out_h = Math.floor((h + 2 * padding - size) / stride) + 1; + layer.out_c = n; + layer.outputs = layer.out_h * layer.out_w * layer.out_c; + section.tensors.push({ name: 'weights', shape: [ Math.floor(c / groups), n, size, size ]}); + section.tensors.push({ name: 'biases', shape: [ n ]}); + section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_h, layer.out_w, layer.out_c ])); + break; + } + case 'connected': { + const outputs = option_find_int(options, 'output', 1); + section.tensors.push({ name: 'weights', shape: [ params.inputs, outputs ] }); + section.tensors.push({ name: 'biases', shape: [ outputs ] }); + section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ outputs ])); + layer.out_h = 1; + layer.out_w = 1; + layer.out_c = outputs; + layer.outputs = outputs; + break; + } + case 'maxpool': { + if (section.inputs[0].type.shape.dimensions[0] !== params.w || + section.inputs[0].type.shape.dimensions[1] !== params.h || + section.inputs[0].type.shape.dimensions[2] !== params.c) { + throw new darknet.Error('Layer before maxpool layer must output image.'); + } + const stride = option_find_int(options, 'stride', 1); + const size = option_find_int(options, 'size', stride); + const padding = option_find_int(options, 'padding', size - 1); + layer.out_w = Math.floor((params.w + padding - size) / stride) + 1; + layer.out_h = Math.floor((params.h + padding - size) / stride) + 1; + layer.out_c = params.c; + layer.outputs = layer.out_w * layer.out_h * layer.out_c; + section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_w, layer.out_h, layer.out_c ])); + break; + } + case 'avgpool': { + if (section.inputs[0].type.shape.dimensions[0] 
!== params.w || + section.inputs[0].type.shape.dimensions[1] !== params.h || + section.inputs[0].type.shape.dimensions[2] !== params.c) { + throw new darknet.Error('Layer before avgpool layer must output image.'); + } + layer.out_w = 1; + layer.out_h = 1; + layer.out_c = params.c; + layer.outputs = layer.out_c; + section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_w, layer.out_h, layer.out_c ])); + break; + } + case 'rnn': { + const output = option_find_int(options, "output", 1); + layer.outputs = output; + section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ output ])); + break; + } + case 'lstm': { + const output = option_find_int(options, "output", 1); + layer.outputs = output; + section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ output ])); + break; + } + case 'softmax': + case 'dropout': { + layer.out_w = params.w; + layer.out_h = params.h; + layer.out_c = params.c; + layer.outputs = params.inputs; + section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.outputs ])); + break; + } + case 'upsample': { + const stride = option_find_int(options, 'stride', 2); + layer.out_w = params.w * stride; + layer.out_h = params.h * stride; + layer.out_c = params.c; + layer.outputs = layer.out_w * layer.out_h * layer.out_c; + section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_w, layer.out_h, layer.out_c ])); + break; + } + case 'yolo': { + const w = params.w; + const h = params.h; + const classes = option_find_int(options, 'classes', 20); + const n = option_find_int(options, 'num', 1); + layer.out_h = h; + layer.out_w = w; + layer.out_c = n * (classes + 4 + 1); + layer.outputs = layer.out_h * layer.out_w * layer.out_c; + section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_w, layer.out_h, layer.out_c ])); + break; + } + case 'reorg': { + const 
stride = option_find_int(options, 'stride', 1); + const reverse = option_find_int(options, 'reverse', 0); + const extra = option_find_int(options, 'extra', 0); + if (reverse) { + layer.out_w = params.w * stride; + layer.out_h = params.h * stride; + layer.out_c = Math.floor(params.c / (stride * stride)); + } + else { + layer.out_w = Math.floor(params.w / stride); + layer.out_h = Math.floor(params.h / stride); + layer.out_c = params.c * (stride * stride); + } + layer.outputs = layer.out_h * layer.out_w * layer.out_c; + if (extra) { + layer.out_w = 0; + layer.out_h = 0; + layer.out_c = 0; + layer.outputs = (params.h * params.w * params.c) + extra; + } + section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.outputs ])); + break; + } + case 'scale_channels': { + infer = false; + break; + } + case 'route': { + let layers = section.input_sections.map((section) => section.layer); + layer.outputs = 0; + for (let input_layer of layers) { + layer.outputs += input_layer.outputs; + } + const first = layers.shift(); + layer.out_w = first.out_w; + layer.out_h = first.out_h; + layer.out_c = first.out_c; + while (layers.length > 0) { + const next = layers.shift(); + if (next.out_w === first.out_w && next.out_h === first.out_h) { + layer.out_c += next.out_c; + } + else { + layer.out_h = 0; + layer.out_w = 0; + layer.out_c = 0; + } + } + section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_h, layer.out_w, layer.out_c ])); + break; + } + case 'shortcut': { + const from = section.from; + layer.w = from.layer.out_w; + layer.h = from.layer.out_h; + layer.c = from.layer.out_c; + layer.out_w = params.w; + layer.out_h = params.h; + layer.out_c = params.c; + layer.outputs = params.w * params.h * params.c; + section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ params.w, params.h, params.c ])); + break; + } + default: { + console.log(' ' + section.type); + infer = false; + break; + 
} + } + params.h = layer.out_h; + params.w = layer.out_w; + params.c = layer.out_c; + params.inputs = layer.outputs; + } + params.arguments = section.outputs; + + const batch_normalize = option_find_int(section.options, 'batch_normalize', 0); + if (batch_normalize) { + let size = -1; + switch (section.type) { + case 'convolutional': { + size = option_find_int(options, 'filters', 1); + break; + } + case 'crnn': + case 'gru': + case 'rnn': + case 'lstm': + case 'connected': { + size = option_find_int(options, 'output', 1); + break; + } + } + if (size < 0) { + throw new darknet.Error("Invalid batch_normalize size for '" + section.type + "'."); + } + let chain = {}; + chain.type = 'batch_normalize'; + chain.tensors = [ + { name: 'scale', shape: [ size ] }, + { name: 'mean', shape: [ size ] }, + { name: 'variance', shape: [ size ] } + ]; + section.chain = section.chain || []; + section.chain.push(chain); + } + + const defaultActivation = section.type === 'shortcut' ? 'linear' : 'logistic'; + const activation = option_find_str(section.options, 'activation', defaultActivation); + if (activation !== defaultActivation) { + let chain = {}; + chain.type = activation; + section.chain = section.chain || []; + section.chain.push(chain); + } } + for (let i = 0; i < sections.length; i++) { this._nodes.push(new darknet.Node(metadata, net, sections[i], i.toString())); } if (sections.length > 0) { - const lastLayer = sections[sections.length - 1]; - for (let i = 0; i < lastLayer._outputs.length; i++) { - this._outputs.push(new darknet.Parameter('output' + (i > 1 ? i.toString() : ''), true, [ - new darknet.Argument(lastLayer._outputs[i], null, null) - ])); + const last = sections[sections.length - 1]; + for (let i = 0; i < last.outputs.length; i++) { + const outputName = 'output' + (i > 1 ? 
i.toString() : ''); + this._outputs.push(new darknet.Parameter(outputName, true, [ last.outputs[i] ])); } } } @@ -245,6 +469,13 @@ darknet.Argument = class { return this._type; } + set type(value) { + if (this._type) { + throw new darknet.Error('Invalid argument type set operation.'); + } + this._type = value; + } + get initializer() { return this._initializer; } @@ -252,72 +483,37 @@ darknet.Argument = class { darknet.Node = class { - constructor(metadata, net, layer, name) { + constructor(metadata, net, section, name) { this._name = name; this._metadata = metadata; - this._operator = layer.type; + this._operator = section.type; this._attributes = []; this._inputs = []; this._outputs = []; this._chain = []; - if (layer._inputs && layer._inputs.length > 0) { - this._inputs.push(new darknet.Parameter(layer._inputs.length <= 1 ? 'input' : 'inputs', true, layer._inputs.map((input) => { - return new darknet.Argument(input, null, null); - }))); + if (section.inputs && section.inputs.length > 0) { + this._inputs.push(new darknet.Parameter(section.inputs.length <= 1 ? 'input' : 'inputs', true, section.inputs)); } - if (layer._outputs && layer._outputs.length > 0) { - this._outputs.push(new darknet.Parameter(layer._outputs.length <= 1 ? 
'output' : 'outputs', true, layer._outputs.map((output) => { - return new darknet.Argument(output, null, null); - }))); + if (section.tensors && section.tensors.length > 0) { + for (let tensor of section.tensors) { + const type = new darknet.TensorType('float32', new darknet.TensorShape(tensor.shape)); + this._inputs.push(new darknet.Parameter(tensor.name, true, [ + new darknet.Argument('', null, new darknet.Tensor('', type) ) + ])); + } } - switch (layer.type) { - case 'convolutional': - case 'deconvolutional': - this._initializer('biases', [ layer.values.filters ]); - this._initializer('weights', [ net.values.channels, layer.values.size, layer.values.size, layer.values.filters ]); - this._batch_normalize(metadata, net, layer, layer.values.filters); - this._activation(metadata, net, layer, 'logistic'); - break; - case 'connected': - this._initializer('biases', [ layer.values.output ]); - this._initializer('weights'); - this._batch_normalize(metadata, net, layer, layer.values.output); - this._activation(metadata, net, layer); - break; - case 'crnn': - this._batch_normalize(metadata, net, layer); - this._activation(metadata, net, layer); - break; - case 'rnn': - this._batch_normalize(metadata, net, layer, layer.values.output); - this._activation(metadata, net, layer); - break; - case 'gru': - this._batch_normalize(metadata, net, layer); - break; - case 'lstm': - this._batch_normalize(metadata, net, layer); - break; - case 'shortcut': - this._activation(metadata, net, layer); - break; - case 'batch_normalize': - this._initializer('scale', [ layer.values.size ]); - this._initializer('mean', [ layer.values.size ]); - this._initializer('variance', [ layer.values.size ]); - break; + if (section.outputs && section.outputs.length > 0) { + this._outputs.push(new darknet.Parameter(section.outputs.length <= 1 ? 
'output' : 'outputs', true, section.outputs)); } - - switch (layer.type) { - case 'shortcut': - delete layer.options.from; - break; - case 'route': - delete layer.options.layers; - break; + if (section.chain) { + for (let chain of section.chain) { + this._chain.push(new darknet.Node(metadata, net, chain, '')); + } } - for (let key of Object.keys(layer.options)) { - this._attributes.push(new darknet.Attribute(metadata, this._operator, key, layer.options[key])); + if (section.options) { + for (let key of Object.keys(section.options)) { + this._attributes.push(new darknet.Attribute(metadata, this._operator, key, section.options[key])); + } } } @@ -390,31 +586,6 @@ darknet.Node = class { get chain() { return this._chain; } - - _initializer(name, shape) { - const id = this._name.toString() + '_' + name; - this._inputs.push(new darknet.Parameter(name, true, [ - new darknet.Argument(id, null, new darknet.Tensor(id, shape)) - ])); - } - - _batch_normalize(metadata, net, layer, size) { - if (layer.values.batch_normalize === 1) { - const batch_normalize_layer = { type: 'batch_normalize', options: {}, values: { size: size || 0 }, _inputs: [], _outputs: [] }; - this._chain.push(new darknet.Node(metadata, net, batch_normalize_layer, '')); - } - delete layer.options.batch_normalize; - } - - _activation(metadata, net, layer) { - const attributeSchema = metadata.getAttributeSchema(layer.type, 'activation'); - if (attributeSchema) { - if (layer.options.activation !== attributeSchema.default) { - this._chain.push(new darknet.Node(metadata, net, { type: layer.options.activation, options: {}, values: {}, _inputs: [], _outputs: [] }, '')); - } - delete layer.options.activation; - } - } }; darknet.Attribute = class { @@ -465,10 +636,13 @@ darknet.Attribute = class { darknet.Tensor = class { - constructor(id, shape) { - shape = shape || null; + constructor(id, type) { this._id = id; - this._type = new darknet.TensorType('?', new darknet.TensorShape(shape)); + this._type = type; + } + + 
get kind() { + return 'Tensor'; } get name() { @@ -515,6 +689,9 @@ darknet.TensorType = class { darknet.TensorShape = class { constructor(dimensions) { + if (dimensions.some((dimension) => !dimension)) { + throw new darknet.Error('Invalid tensor shape.'); + } this._dimensions = dimensions; } diff --git a/test/models.json b/test/models.json index 8dbcfa08d84..79ce5e72f45 100644 --- a/test/models.json +++ b/test/models.json @@ -1834,7 +1834,7 @@ { "type": "darknet", "target": "alexnet.cfg,alexnet.weights", - "source": "https://raw.githubusercontent.com/pjreddie/darknet/master/cfg/alexnet.cfg,https://pjreddie.com/media/files/alexnet.weights", + "source": "https://github.com/pjreddie/darknet/blob/master/cfg/alexnet.cfg?raw=true,https://pjreddie.com/media/files/alexnet.weights", "format": "Darknet", "link": "https://pjreddie.com/darknet/imagenet" }, @@ -1855,7 +1855,7 @@ { "type": "darknet", "target": "darknet53_448.cfg,darknet53_448.weights", - "source": "https://raw.githubusercontent.com/pjreddie/darknet/master/cfg/darknet53_448.cfg,https://pjreddie.com/media/files/darknet53_448.weights", + "source": "https://github.com/pjreddie/darknet/blob/master/cfg/darknet53_448.cfg?raw=true,https://pjreddie.com/media/files/darknet53_448.weights", "format": "Darknet", "link": "https://pjreddie.com/darknet/imagenet" }, @@ -1866,6 +1866,13 @@ "format": "Darknet", "link": "https://github.com/lutzroeder/netron/issues/381" }, + { + "type": "darknet", + "target": "lstm.train.cfg", + "source": "https://github.com/AlexeyAB/darknet/blob/master/cfg/lstm.train.cfg?raw=true", + "format": "Darknet", + "link": "https://github.com/AlexeyAB/darknet" + }, { "type": "darknet", "target": "mixnet_m_gpu.cfg", @@ -1873,24 +1880,38 @@ "format": "Darknet", "link": "https://github.com/lutzroeder/netron/issues/381" }, + { + "type": "darknet", + "target": "msr_50.cfg", + "source": "https://github.com/AlexeyAB/darknet/blob/master/cfg/msr_50.cfg?raw=true", + "format": "Darknet", + "link": 
"https://github.com/AlexeyAB/darknet" + }, { "type": "darknet", "target": "go.cfg,go.weights", - "source": "https://raw.githubusercontent.com/pjreddie/darknet/master/cfg/go.cfg,https://pjreddie.com/media/files/go.weights", + "source": "https://github.com/pjreddie/darknet/blob/master/cfg/go.cfg?raw=true,https://pjreddie.com/media/files/go.weights", "format": "Darknet", "link": "https://pjreddie.com/darknet/darkgo-go-in-darknet" }, + { + "type": "darknet", + "target": "gru.cfg", + "source": "https://github.com/pjreddie/darknet/blob/master/cfg/gru.cfg?raw=true", + "format": "Darknet", + "link": "https://pjreddie.com/darknet" + }, { "type": "darknet", "target": "jnet-conv.cfg,jnet-conv.weights", - "source": "https://raw.githubusercontent.com/pjreddie/darknet/master/cfg/jnet-conv.cfg,https://pjreddie.com/media/files/jnet-conv.weights", + "source": "https://github.com/pjreddie/darknet/blob/master/cfg/jnet-conv.cfg?raw=true,https://pjreddie.com/media/files/jnet-conv.weights", "format": "Darknet", "link": "https://pjreddie.com/darknet/nightmare" }, { "type": "darknet", "target": "grrm.cfg,grrm.weights", - "source": "https://raw.githubusercontent.com/pjreddie/darknet/master/cfg/rnn.cfg,https://pjreddie.com/media/files/grrm.weights", + "source": "https://github.com/pjreddie/darknet/blob/master/cfg/rnn.cfg?raw=true,https://pjreddie.com/media/files/grrm.weights", "format": "Darknet", "link": "https://pjreddie.com/darknet/rnns-in-darknet" }, @@ -1904,24 +1925,66 @@ { "type": "darknet", "target": "resnet18.cfg,resnet18.weights", - "source": "https://raw.githubusercontent.com/pjreddie/darknet/master/cfg/resnet18.cfg,https://pjreddie.com/media/files/resnet18.weights", + "source": "https://github.com/pjreddie/darknet/blob/master/cfg/resnet18.cfg?raw=true,https://pjreddie.com/media/files/resnet18.weights", "format": "Darknet", "link": "https://pjreddie.com/darknet/imagenet" }, { "type": "darknet", "target": "resnet152.cfg,resnet152.weights", - "source": 
"https://raw.githubusercontent.com/pjreddie/darknet/master/cfg/resnet152.cfg,https://pjreddie.com/media/files/resnet152.weights", + "source": "https://github.com/pjreddie/darknet/blob/master/cfg/resnet152.cfg?raw=true,https://pjreddie.com/media/files/resnet152.weights", "format": "Darknet", "link": "https://pjreddie.com/darknet/imagenet" }, + { + "type": "darknet", + "target": "tiny.cfg,tiny.weights", + "source": "https://github.com/pjreddie/darknet/blob/master/cfg/tiny.cfg?raw=true,https://pjreddie.com/media/files/tiny.weights", + "format": "Darknet", + "link": "https://pjreddie.com/darknet/tiny-darknet" + }, + { + "type": "darknet", + "target": "yolo-coco.cfg", + "source": "https://github.com/AlexeyAB/darknet/blob/master/cfg/yolov1/yolo-coco.cfg?raw=true", + "format": "Darknet", + "link": "https://github.com/AlexeyAB/darknet" + }, + { + "type": "darknet", + "target": "yolo-voc.cfg", + "source": "https://github.com/AlexeyAB/darknet/blob/master/cfg/yolo-voc.cfg?raw=true", + "format": "Darknet", + "link": "https://github.com/AlexeyAB/darknet" + }, + { + "type": "darknet", + "target": "yolo.cfg", + "source": "https://github.com/AlexeyAB/darknet/blob/master/cfg/yolo.cfg?raw=true", + "format": "Darknet", + "link": "https://github.com/AlexeyAB/darknet" + }, + { + "type": "darknet", + "target": "yolo9000.cfg", + "source": "https://github.com/AlexeyAB/darknet/blob/master/cfg/yolo9000.cfg?raw=true", + "format": "Darknet", + "link": "https://github.com/AlexeyAB/darknet" + }, { "type": "darknet", "target": "yolov3-spp.cfg,yolov3-spp.weights", - "source": "https://raw.githubusercontent.com/pjreddie/darknet/master/cfg/yolov3-spp.cfg,https://pjreddie.com/media/files/yolov3-spp.weights", + "source": "https://github.com/pjreddie/darknet/blob/master/cfg/yolov3-spp.cfg?raw=true,https://pjreddie.com/media/files/yolov3-spp.weights", "format": "Darknet", "link": "https://pjreddie.com/darknet/yolo" }, + { + "type": "darknet", + "target": "yolov3-tiny_occlusion_track.cfg", + "source": 
"https://github.com/AlexeyAB/darknet/blob/master/cfg/yolov3-tiny_occlusion_track.cfg?raw=true", + "format": "Darknet", + "link": "https://github.com/AlexeyAB/darknet" + }, { "type": "dl4j", "target": "darknet19_dl4j_inference.v2.zip", diff --git a/tools/darknet b/tools/darknet index 034d8a38ad9..77b7266c6de 100755 --- a/tools/darknet +++ b/tools/darknet @@ -14,7 +14,7 @@ clean() { sync() { bold "darknet sync" - [ -d "./third_party/src/darknet" ] || git clone --quiet https://github.com/pjreddie/darknet.git "./third_party/src/darknet" + [ -d "./third_party/src/darknet" ] || git clone --quiet https://github.com/AlexeyAB/darknet.git "./third_party/src/darknet" pushd "./third_party/src/darknet" > /dev/null git pull --quiet --prune popd > /dev/null