Collective Knowledge Aggregator proof-of-concept

This page is outdated! New version is available here.


2fae7c314f90b337

Date: 2017-07-17 18:20:57.250067
License:
Module: experiment.bench.dnn.mobile
Repo: upload
CID (DOI-like distributed ID): 4dcf435bb0d92fa6:2fae7c314f90b337
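
This entry can also be loaded programmatically via the CK Python API. A minimal sketch, assuming the legacy CK framework is installed (pip install ck) and the repository containing this entry is available locally; the module and UID are the ones listed above:

import ck.kernel as ck

# Load the CK entry whose UID appears in the CID above
# (module 'experiment.bench.dnn.mobile', data UID '2fae7c314f90b337').
r = ck.access({'action': 'load',
               'module_uoa': 'experiment.bench.dnn.mobile',
               'data_uoa': '2fae7c314f90b337'})
if r['return'] > 0:
    raise RuntimeError(r.get('error', 'CK access failed'))

meta = r['dict']  # same JSON as shown in the "Meta:" section below
print(meta['meta']['plat_name'], meta['meta']['engine'])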

Files:

Cross-linking (dependencies):

Meta:
{
  "all_raw_results": [
    {
      "behavior_uid": "04d9b8ef3383ae26", 
      "cpu_freqs_after": [
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }, 
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }, 
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }, 
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }, 
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }
      ], 
      "cpu_freqs_before": [
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }, 
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }, 
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }, 
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }, 
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }
      ], 
      "image_height": 2592, 
      "image_width": 1944, 
      "mispredictions": [
        {
          "correct_answer": "young woman", 
          "mispredicted_image": "misprediction-image-8149b47708f2fdb8.jpg", 
          "misprediction_results": "0.0790 - \"lampshade (814)\"\n0.0641 - \"refrigerator (668)\"\n0.0267 - \"brassiere (872)\"\n0.0200 - \"window shade (904)\"\n0.0188 - \"miniskirt (880)\"\n"
        }, 
        {
          "correct_answer": "box of cereal", 
          "mispredicted_image": "misprediction-image-fa90449c6ad1bd5c.jpg", 
          "misprediction_results": "0.2192 - \"bucket (820)\"\n0.0901 - \"hamper (840)\"\n0.0750 - \"ashcan (752)\"\n0.0741 - \"carton (749)\"\n0.0663 - \"packet (921)\"\n"
        }, 
        {
          "correct_answer": "grape tomato", 
          "mispredicted_image": "misprediction-image-209938376768810f.jpg", 
          "misprediction_results": "0.5603 - \"balloon (233)\"\n0.0577 - \"ping-pong ball (841)\"\n0.0310 - \"hip (328)\"\n0.0291 - \"bell pepper (735)\"\n0.0236 - \"punching bag (846)\"\n"
        }, 
        {
          "correct_answer": "foot", 
          "mispredicted_image": "misprediction-image-470f8771cf21f915.jpg", 
          "misprediction_results": "0.4325 - \"Band Aid (967)\"\n0.2413 - \"scale (521)\"\n0.0478 - \"sandal (751)\"\n0.0439 - \"bathtub (884)\"\n0.0333 - \"sunscreen (810)\"\n"
        }
      ], 
      "prediction": "0.0790 - \"lampshade (814)\"\n0.0641 - \"refrigerator (668)\"\n0.0267 - \"brassiere (872)\"\n0.0200 - \"window shade (904)\"\n0.0188 - \"miniskirt (880)\"\n", 
      "time": [
        8319, 
        6813, 
        7451, 
        5163, 
        5713, 
        6736, 
        5247, 
        5170, 
        5202, 
        5223, 
        5175, 
        6778, 
        6027, 
        7218, 
        6875
      ], 
      "user": "", 
      "xopenme": {
        "execution_time": [
          2.830847, 
          2.308808, 
          2.229577, 
          1.822678, 
          1.784103, 
          2.108788, 
          1.810854, 
          1.828762, 
          1.80264, 
          1.834474, 
          1.800603, 
          2.380157, 
          1.807736, 
          1.837098, 
          2.254001
        ], 
        "execution_time_kernel_0": [
          2.830847, 
          2.308808, 
          2.229577, 
          1.822678, 
          1.784103, 
          2.108788, 
          1.810854, 
          1.828762, 
          1.80264, 
          1.834474, 
          1.800603, 
          2.380157, 
          1.807736, 
          1.837098, 
          2.254001
        ], 
        "execution_time_kernel_1": [
          0, 
          0, 
          0, 
          0, 
          0, 
          0, 
          0, 
          0, 
          0, 
          0, 
          0, 
          0, 
          0, 
          0, 
          0
        ], 
        "execution_time_kernel_2": [
          5.239422, 
          4.261582, 
          4.963194, 
          3.136353, 
          3.711692, 
          4.407799, 
          3.230397, 
          3.138264, 
          3.197281, 
          3.182575, 
          3.170334, 
          4.186928, 
          3.983823, 
          5.119409, 
          4.344524
        ]
      }
    }, 
    {
      "behavior_uid": "95de29a3243669ee", 
      "cpu_freqs_after": [
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }
      ], 
      "cpu_freqs_before": [
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }
      ], 
      "image_height": 2000, 
      "image_width": 1440, 
      "prediction": "0.4245 - \"tow truck (282)\"\n0.1238 - \"limousine (270)\"\n0.0742 - \"racer (273)\"\n0.0513 - \"convertible (268)\"\n0.0282 - \"trailer truck (283)\"\n", 
      "time": [
        5534, 
        7056, 
        9101
      ], 
      "user": "", 
      "xopenme": {
        "execution_time": [
          1.485783, 
          1.629285, 
          1.662431
        ], 
        "execution_time_kernel_0": [
          1.485783, 
          1.629285, 
          1.662431
        ], 
        "execution_time_kernel_1": [
          0, 
          0, 
          0
        ], 
        "execution_time_kernel_2": [
          3.803311, 
          5.197962, 
          7.128695
        ]
      }
    }, 
    {
      "behavior_uid": "deb784214783a800", 
      "cpu_freqs_after": [
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }
      ], 
      "cpu_freqs_before": [
        {
          "0": 1190.4, 
          "1": 1190.4, 
          "2": 1190.4, 
          "3": 1190.4
        }
      ], 
      "image_height": 1200, 
      "image_width": 1200, 
      "mispredictions": [
        {
          "correct_answer": "0.0402 - \"garter snake (482)\"", 
          "mispredicted_image": "misprediction-image-8b422285cae0b366.jpg", 
          "misprediction_results": "0.3488 - \"hognose snake (479)\"\n0.2585 - \"water snake (483)\"\n0.1746 - \"night snake (485)\"\n0.0402 - \"garter snake (482)\"\n0.0211 - \"rock python (487)\"\n"
        }
      ], 
      "prediction": "0.3488 - \"hognose snake (479)\"\n0.2585 - \"water snake (483)\"\n0.1746 - \"night snake (485)\"\n0.0402 - \"garter snake (482)\"\n0.0211 - \"rock python (487)\"\n", 
      "time": [
        4476, 
        4954, 
        5039
      ], 
      "user": "", 
      "xopenme": {
        "execution_time": [
          1.012447, 
          1.0048, 
          1.011357
        ], 
        "execution_time_kernel_0": [
          1.012447, 
          1.0048, 
          1.011357
        ], 
        "execution_time_kernel_1": [
          0, 
          0, 
          0
        ], 
        "execution_time_kernel_2": [
          3.261189, 
          3.725406, 
          3.827376
        ]
      }
    }
  ], 
  "meta": {
    "cpu_abi": "armeabi-v7a", 
    "cpu_name": "Qualcomm Technologies, Inc MSM8916", 
    "cpu_uid": "bc0db4c4bbe612fe", 
    "crowd_uid": "8ea63e841698f4ad", 
    "engine": "TensorFlow CPU", 
    "gpgpu_name": "", 
    "gpgpu_uid": "", 
    "gpu_name": "Qualcomm Adreno (TM) 306", 
    "gpu_uid": "4e79a7ee92836d24", 
    "model": "Inception Imagenet 20151205", 
    "os_name": "Android 6.0.1", 
    "os_uid": "3285b3153a9a4ea4", 
    "plat_name": "SAMSUNG SM-S320VL", 
    "platform_uid": ""
  }
}
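
The raw results above can be summarized offline with plain Python. A minimal sketch, assuming the meta shown above has been saved locally as meta.json (a hypothetical file name); all field names are taken directly from the JSON:

import json
import statistics

with open('meta.json') as f:
    meta = json.load(f)

# Platform description from the 'meta' sub-dictionary.
m = meta['meta']
print('%s, %s, %s' % (m['plat_name'], m['cpu_name'], m['engine']))

# Per-submission timing summary from 'all_raw_results'.
for result in meta['all_raw_results']:
    times = result['xopenme']['execution_time']
    print('%s: %d runs, mean %.3f s, min %.3f s, max %.3f s' % (
        result['behavior_uid'], len(times),
        statistics.mean(times), min(times), max(times)))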

API desc:
{}

If you notice copyrighted, inappropriate or illegal content that should not be here, please report it to us as soon as possible and we will try to remove it within 48 hours.

Developed by Grigori Fursin
Implemented as a CK workflow