Collective Knowledge Aggregator proof-of-concept



UID: ddb4fce51814f41b

Date: 2017-06-25 06:55:28.633017
License:
Module: experiment.bench.dnn.mobile
Repo: upload
CID (DOI-like distributed ID): 4dcf435bb0d92fa6:ddb4fce51814f41b

Files:

Cross-linking (dependencies):

Meta:
{
  "all_raw_results": [
    {
      "behavior_uid": "cce455da41c22a18", 
      "cpu_freqs_after": [
        {
          "1": 1516.8, 
          "2": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "cpu_freqs_before": [
        {
          "0": 1516.8, 
          "1": 1516.8, 
          "2": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "image_height": 3024, 
      "image_width": 4032, 
      "prediction": "0.9835 - \"n03793489 mouse, computer mouse\"\n0.0042 - \"n04548280 wall clock\"\n0.0015 - \"n03532672 hook, claw\"\n0.0013 - \"n02988304 CD player\"\n0.0013 - \"n04317175 stethoscope\"\n", 
      "time": [
        12431, 
        17186, 
        11556
      ], 
      "user": "", 
      "xopenme": {
        "execution_time": [
          8.960379, 
          14.254998, 
          9.298068
        ], 
        "execution_time_kernel_0": [
          8.960379, 
          14.254998, 
          9.298068
        ], 
        "execution_time_kernel_1": [
          0.863833, 
          0.823593, 
          0.836423
        ], 
        "execution_time_kernel_2": [
          2.343162, 
          1.897855, 
          1.201912
        ]
      }
    }, 
    {
      "behavior_uid": "99492c2fe07c77dd", 
      "cpu_freqs_after": [
        {
          "2": 1516.8, 
          "3": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "cpu_freqs_before": [
        {
          "1": 1516.8, 
          "2": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "image_height": 32, 
      "image_width": 32, 
      "mispredictions": [
        {
          "correct_answer": "Low resolution image", 
          "mispredicted_image": "misprediction-image-6435f92514e3f819.jpg", 
          "misprediction_results": "0.2525 - \"n03840681 ocarina, sweet potato\"\n0.1003 - \"n04579145 whiskey jug\"\n0.0699 - \"n02606052 rock beauty, Holocanthus tricolor\"\n0.0512 - \"n04023962 punching bag, punch bag, punching ball, punchball\"\n0.0490 - \"n03825788 nipple\"\n"
        }
      ], 
      "prediction": "0.2525 - \"n03840681 ocarina, sweet potato\"\n0.1003 - \"n04579145 whiskey jug\"\n0.0699 - \"n02606052 rock beauty, Holocanthus tricolor\"\n0.0512 - \"n04023962 punching bag, punch bag, punching ball, punchball\"\n0.0490 - \"n03825788 nipple\"\n", 
      "time": [
        17577, 
        12594, 
        12676
      ], 
      "user": "", 
      "xopenme": {
        "execution_time": [
          15.907401, 
          11.289039, 
          9.598497
        ], 
        "execution_time_kernel_0": [
          15.907401, 
          11.289039, 
          9.598497
        ], 
        "execution_time_kernel_1": [
          0.013627, 
          0.008132, 
          0.020718
        ], 
        "execution_time_kernel_2": [
          1.416696, 
          1.077677, 
          2.889093
        ]
      }
    }, 
    {
      "behavior_uid": "f3fbceeeb6934770", 
      "cpu_freqs_after": [
        {
          "1": 1516.8, 
          "3": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "cpu_freqs_before": [
        {
          "0": 1516.8, 
          "2": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "image_height": 720, 
      "image_width": 1000, 
      "mispredictions": [
        {
          "correct_answer": "Mri", 
          "mispredicted_image": "misprediction-image-c653b514245ff896.jpg", 
          "misprediction_results": "0.5272 - \"n04311174 steel drum\"\n0.1132 - \"n03920288 Petri dish\"\n0.0630 - \"n04332243 strainer\"\n0.0237 - \"n03447721 gong, tam-tam\"\n0.0231 - \"n04192698 shield, buckler\"\n"
        }
      ], 
      "prediction": "0.5272 - \"n04311174 steel drum\"\n0.1132 - \"n03920288 Petri dish\"\n0.0630 - \"n04332243 strainer\"\n0.0237 - \"n03447721 gong, tam-tam\"\n0.0231 - \"n04192698 shield, buckler\"\n", 
      "time": [
        11783, 
        13702, 
        12246
      ], 
      "user": "", 
      "xopenme": {
        "execution_time": [
          10.315063, 
          10.605391, 
          10.622898
        ], 
        "execution_time_kernel_0": [
          10.315063, 
          10.605391, 
          10.622898
        ], 
        "execution_time_kernel_1": [
          0.078782, 
          0.067539, 
          0.087997
        ], 
        "execution_time_kernel_2": [
          1.191357, 
          2.799414, 
          1.161135
        ]
      }
    }, 
    {
      "behavior_uid": "8ddc9ee6ff11491c", 
      "cpu_freqs_after": [
        {
          "1": 1516.8, 
          "2": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "cpu_freqs_before": [
        {
          "0": 1516.8, 
          "1": 1516.8, 
          "2": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "image_height": 552, 
      "image_width": 766, 
      "mispredictions": [
        {
          "correct_answer": "Soldier", 
          "mispredicted_image": "misprediction-image-5b1d6d3988dd458e.jpg", 
          "misprediction_results": "0.9702 - \"n03763968 military uniform\"\n0.0053 - \"n02916936 bulletproof vest\"\n0.0026 - \"n02883205 bow tie, bow-tie, bowtie\"\n0.0017 - \"n04479046 trench coat\"\n0.0017 - \"n04090263 rifle\"\n"
        }
      ], 
      "prediction": "0.9702 - \"n03763968 military uniform\"\n0.0053 - \"n02916936 bulletproof vest\"\n0.0026 - \"n02883205 bow tie, bow-tie, bowtie\"\n0.0017 - \"n04479046 trench coat\"\n0.0017 - \"n04090263 rifle\"\n", 
      "time": [
        14418, 
        12536, 
        10995
      ], 
      "user": "", 
      "xopenme": {
        "execution_time": [
          11.191973, 
          11.200605, 
          8.641407
        ], 
        "execution_time_kernel_0": [
          11.191973, 
          11.200605, 
          8.641407
        ], 
        "execution_time_kernel_1": [
          0.05405, 
          0.058237, 
          0.039678
        ], 
        "execution_time_kernel_2": [
          2.981837, 
          1.045931, 
          2.152437
        ]
      }
    }, 
    {
      "behavior_uid": "743b988d5952f418", 
      "cpu_freqs_after": [
        {
          "0": 1516.8, 
          "1": 1516.8, 
          "2": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "cpu_freqs_before": [
        {
          "0": 1516.8, 
          "1": 1516.8, 
          "2": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "image_height": 438, 
      "image_width": 549, 
      "mispredictions": [
        {
          "correct_answer": "bristle starfish", 
          "mispredicted_image": "misprediction-image-723a2f8dba505c67.jpg", 
          "misprediction_results": "0.6220 - \"n09256479 coral reef\"\n0.1037 - \"n02319095 sea urchin\"\n0.0865 - \"n02321529 sea cucumber, holothurian\"\n0.0741 - \"n01986214 hermit crab\"\n0.0536 - \"n01914609 sea anemone, anemone\"\n"
        }
      ], 
      "prediction": "0.6220 - \"n09256479 coral reef\"\n0.1037 - \"n02319095 sea urchin\"\n0.0865 - \"n02321529 sea cucumber, holothurian\"\n0.0741 - \"n01986214 hermit crab\"\n0.0536 - \"n01914609 sea anemone, anemone\"\n", 
      "time": [
        12206, 
        11911, 
        12232
      ], 
      "user": "", 
      "xopenme": {
        "execution_time": [
          9.326352, 
          8.988847, 
          9.418199
        ], 
        "execution_time_kernel_0": [
          9.326352, 
          8.988847, 
          9.418199
        ], 
        "execution_time_kernel_1": [
          0.03868, 
          0.033713, 
          0.033132
        ], 
        "execution_time_kernel_2": [
          2.637088, 
          2.658123, 
          2.576411
        ]
      }
    }, 
    {
      "behavior_uid": "96bf44b068ccb9dc", 
      "cpu_freqs_after": [
        {
          "0": 1516.8, 
          "1": 1516.8, 
          "2": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "cpu_freqs_before": [
        {
          "1": 1516.8, 
          "2": 1516.8, 
          "3": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "image_height": 428, 
      "image_width": 499, 
      "mispredictions": [
        {
          "correct_answer": "Pinecones", 
          "mispredicted_image": "misprediction-image-cd59e048dcaf4f92.jpg", 
          "misprediction_results": "0.2151 - \"n01729322 hognose snake, puff adder, sand viper\"\n0.1651 - \"n01753488 horned viper, cerastes, sand viper, horned asp, Cerastes cornutus\"\n0.1092 - \"n13052670 hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa\"\n0.0533 - \"n01756291 sidewinder, horned rattlesnake, Crotalus cerastes\"\n0.0447 - \"n01734418 king snake, kingsnake\"\n"
        }
      ], 
      "prediction": "0.2151 - \"n01729322 hognose snake, puff adder, sand viper\"\n0.1651 - \"n01753488 horned viper, cerastes, sand viper, horned asp, Cerastes cornutus\"\n0.1092 - \"n13052670 hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa\"\n0.0533 - \"n01756291 sidewinder, horned rattlesnake, Crotalus cerastes\"\n0.0447 - \"n01734418 king snake, kingsnake\"\n", 
      "time": [
        11002, 
        10870, 
        10479
      ], 
      "user": "", 
      "xopenme": {
        "execution_time": [
          9.12061, 
          8.308331, 
          8.029485
        ], 
        "execution_time_kernel_0": [
          9.12061, 
          8.308331, 
          8.029485
        ], 
        "execution_time_kernel_1": [
          0.054593, 
          0.040334, 
          0.025834
        ], 
        "execution_time_kernel_2": [
          1.560273, 
          2.314246, 
          2.193276
        ]
      }
    }, 
    {
      "behavior_uid": "4cc64692c4487e9c", 
      "cpu_freqs_after": [
        {
          "0": 1516.8, 
          "1": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "cpu_freqs_before": [
        {
          "1": 1516.8, 
          "2": 1516.8, 
          "4": 1209.6, 
          "5": 1209.6, 
          "6": 1209.6, 
          "7": 1209.6
        }
      ], 
      "image_height": 448, 
      "image_width": 384, 
      "mispredictions": [
        {
          "correct_answer": "Conifer, pinetree.", 
          "mispredicted_image": "misprediction-image-f50b891c4b789134.jpg", 
          "misprediction_results": "0.2815 - \"n04326547 stone wall\"\n0.2026 - \"n03733281 maze, labyrinth\"\n0.1847 - \"n07714990 broccoli\"\n0.0471 - \"n09246464 cliff, drop, drop-off\"\n0.0362 - \"n03743016 megalith, megalithic structure\"\n"
        }
      ], 
      "prediction": "0.2815 - \"n04326547 stone wall\"\n0.2026 - \"n03733281 maze, labyrinth\"\n0.1847 - \"n07714990 broccoli\"\n0.0471 - \"n09246464 cliff, drop, drop-off\"\n0.0362 - \"n03743016 megalith, megalithic structure\"\n", 
      "time": [
        11532, 
        13068, 
        13247
      ], 
      "user": "", 
      "xopenme": {
        "execution_time": [
          9.252317, 
          10.219605, 
          10.908071
        ], 
        "execution_time_kernel_0": [
          9.252317, 
          10.219605, 
          10.908071
        ], 
        "execution_time_kernel_1": [
          0.034287, 
          0.033212, 
          0.025119
        ], 
        "execution_time_kernel_2": [
          1.984582, 
          2.607663, 
          2.135593
        ]
      }
    }
  ], 
  "meta": {
    "cpu_abi": "arm64-v8a", 
    "cpu_name": "Qualcomm Technologies, Inc MSM8952", 
    "cpu_uid": "1f1cb8b93485c984", 
    "crowd_uid": "a7340ffbefcb5923", 
    "engine": "Caffe CPU", 
    "gpgpu_name": "", 
    "gpgpu_uid": "", 
    "gpu_name": "Qualcomm Adreno (TM) 405", 
    "gpu_uid": "c5bebc57566d5cd2", 
    "model": "BVLC AlexNet", 
    "os_name": "Android 6.0.1", 
    "os_uid": "3285b3153a9a4ea4", 
    "plat_name": "ZTE K88", 
    "platform_uid": "fead928c7f41819c"
  }
}
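For readers who want to process this record programmatically, the sketch below shows one way the Meta block above could be summarised with plain Python. It is a minimal, illustrative example, not part of the CK API: the file name meta.json (a local copy of the JSON above) and the choice of statistics are assumptions.

```python
# Minimal sketch: summarise the "all_raw_results" list from the Meta block.
# Assumes the JSON above has been saved locally as meta.json (illustrative name).
import json
import statistics

with open("meta.json") as f:
    meta = json.load(f)

for result in meta["all_raw_results"]:
    times = result["xopenme"]["execution_time"]   # seconds, one value per repetition
    top1 = result["prediction"].splitlines()[0]   # highest-probability ImageNet class
    print(f'{result["behavior_uid"]}: '
          f'{result["image_width"]}x{result["image_height"]} px, '
          f'mean {statistics.mean(times):.2f} s, top-1: {top1}')
```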

API desc:
{}


Developed by Grigori Fursin. Implemented as a CK workflow.