Listen at 0.0.0.0:6688
Traceback (most recent call last):
  File "/usr/lib/python2.7/wsgiref/handlers.py", line 85, in run
    self.result = application(self.environ, self.start_response)
  File "/usr/local/lib/python2.7/dist-packages/prometheus_client/exposition.py", line 52, in prometheus_app
    status, header, output = _bake_output(registry, accept_header, params)
  File "/usr/local/lib/python2.7/dist-packages/prometheus_client/exposition.py", line 40, in _bake_output
    output = encoder(registry)
  File "/usr/local/lib/python2.7/dist-packages/prometheus_client/exposition.py", line 137, in generate_latest
    output.append(sample_line(s))
  File "/usr/local/lib/python2.7/dist-packages/prometheus_client/exposition.py", line 103, in sample_line
    line.name, labelstr, floatToGoString(line.value), timestamp)
  File "/usr/local/lib/python2.7/dist-packages/prometheus_client/utils.py", line 9, in floatToGoString
    d = float(d)
ValueError: ('could not convert string to float: DISK', Metric(hadoop_hdfs_datanode_volume_state, Volume infomation in each path and in each mode, gauge, , [Sample(name='hadoop_hdfs_datanode_volume_state', labels={'cluster': 'hadoopcluster_monarch', 'state': 'freeSpace', 'version': u'3.2.1, rb3cbbb467e22ea829b3808f4b7b01d07e0bf3842', '_target': u'kworker1', 'path': '/usr/local/hadoop/data/dataNode'}, value=12488241152, timestamp=None, exemplar=None), Sample(name='hadoop_hdfs_datanode_volume_state', labels={'cluster': 'hadoopcluster_monarch', 'state': 'reservedSpaceForReplicas', 'version': u'3.2.1, rb3cbbb467e22ea829b3808f4b7b01d07e0bf3842', '_target': u'kworker1', 'path': '/usr/local/hadoop/data/dataNode'}, value=0, timestamp=None, exemplar=None), Sample(name='hadoop_hdfs_datanode_volume_state', labels={'cluster': 'hadoopcluster_monarch', 'state': 'reservedSpace', 'version': u'3.2.1, rb3cbbb467e22ea829b3808f4b7b01d07e0bf3842', '_target': u'kworker1', 'path': '/usr/local/hadoop/data/dataNode'}, value=0, timestamp=None, exemplar=None), Sample(name='hadoop_hdfs_datanode_volume_state', labels={'cluster': 'hadoopcluster_monarch', 'state': 'storageType', 'version': u'3.2.1, rb3cbbb467e22ea829b3808f4b7b01d07e0bf3842', '_target': u'kworker1', 'path': '/usr/local/hadoop/data/dataNode'}, value='DISK', timestamp=None, exemplar=None), Sample(name='hadoop_hdfs_datanode_volume_state', labels={'cluster': 'hadoopcluster_monarch', 'state': 'numBlocks', 'version': u'3.2.1, rb3cbbb467e22ea829b3808f4b7b01d07e0bf3842', '_target': u'kworker1', 'path': '/usr/local/hadoop/data/dataNode'}, value=2, timestamp=None, exemplar=None), Sample(name='hadoop_hdfs_datanode_volume_state', labels={'cluster': 'hadoopcluster_monarch', 'state': 'usedSpace', 'version': u'3.2.1, rb3cbbb467e22ea829b3808f4b7b01d07e0bf3842', '_target': u'kworker1', 'path': '/usr/local/hadoop/data/dataNode'}, value=57344, timestamp=None, exemplar=None), Sample(name='hadoop_hdfs_datanode_volume_state', labels={'cluster': 'hadoopcluster_monarch', 'state': 'freeSpace', 'version': u'3.2.1, rb3cbbb467e22ea829b3808f4b7b01d07e0bf3842', '_target': u'kworker2', 'path': '/usr/local/hadoop/data/dataNode'}, value=13066493952, timestamp=None, exemplar=None), Sample(name='hadoop_hdfs_datanode_volume_state', labels={'cluster': 'hadoopcluster_monarch', 'state': 'reservedSpaceForReplicas', 'version': u'3.2.1, rb3cbbb467e22ea829b3808f4b7b01d07e0bf3842', '_target': u'kworker2', 'path': '/usr/local/hadoop/data/dataNode'}, value=0, timestamp=None, exemplar=None), Sample(name='hadoop_hdfs_datanode_volume_state', labels={'cluster': 'hadoopcluster_monarch', 'state': 'reservedSpace', 'version': u'3.2.1, rb3cbbb467e22ea829b3808f4b7b01d07e0bf3842', '_target': u'kworker2', 'path': '/usr/local/hadoop/data/dataNode'}, value=0, timestamp=None, exemplar=None), Sample(name='hadoop_hdfs_datanode_volume_state', labels={'cluster': 'hadoopcluster_monarch', 'state': 'storageType', 'version': u'3.2.1, rb3cbbb467e22ea829b3808f4b7b01d07e0bf3842', '_target': u'kworker2', 'path': '/usr/local/hadoop/data/dataNode'}, value='DISK', timestamp=None, exemplar=None), Sample(name='hadoop_hdfs_datanode_volume_state', labels={'cluster': 'hadoopcluster_monarch', 'state': 'numBlocks', 'version': u'3.2.1, rb3cbbb467e22ea829b3808f4b7b01d07e0bf3842', '_target': u'kworker2', 'path': '/usr/local/hadoop/data/dataNode'}, value=2, timestamp=None, exemplar=None), Sample(name='hadoop_hdfs_datanode_volume_state', labels={'cluster': 'hadoopcluster_monarch', 'state': 'usedSpace', 'version': u'3.2.1, rb3cbbb467e22ea829b3808f4b7b01d07e0bf3842', '_target': u'kworker2', 'path': '/usr/local/hadoop/data/dataNode'}, value=57344, timestamp=None, exemplar=None)]))
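What the traceback points to: prometheus_client serialises every sample value through floatToGoString(), which calls float() on it, so any non-numeric value breaks the /metrics handler. Here the exporter put the JMX field storageType ('DISK', a string) into the hadoop_hdfs_datanode_volume_state gauge as a sample value. A minimal reproduction sketch, assuming a custom collector shaped roughly like the exporter's (the collector class and label values are illustrative, not the actual hadoop exporter code):

# reproduce_disk_valueerror.py (hypothetical file name)
from prometheus_client import generate_latest
from prometheus_client.core import GaugeMetricFamily, REGISTRY

class VolumeStateCollector(object):
    def collect(self):
        g = GaugeMetricFamily(
            'hadoop_hdfs_datanode_volume_state',
            'Volume information in each path and in each mode',
            labels=['cluster', 'state', '_target', 'path'])
        # Numeric states serialise fine:
        g.add_metric(
            ['hadoopcluster_monarch', 'freeSpace', 'kworker1',
             '/usr/local/hadoop/data/dataNode'],
            12488241152)
        # This sample is the problem: 'DISK' is a string, and
        # floatToGoString() does float('DISK'), which raises
        # ValueError: could not convert string to float: DISK
        g.add_metric(
            ['hadoopcluster_monarch', 'storageType', 'kworker1',
             '/usr/local/hadoop/data/dataNode'],
            'DISK')
        yield g

REGISTRY.register(VolumeStateCollector())
generate_latest(REGISTRY)  # raises ValueError while rendering the 'DISK' sample

The usual fix on the exporter side is to skip non-numeric JMX fields, or to expose a value like storageType as a label (or a separate info-style metric) rather than as the sample value of a gauge.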