
Merge pull request #10 from k3oni/master

bring dev current
windows^2
Florian N (committed by GitHub), 9 years ago
commit 5dab566dc6
Changed files:
  1. README.rst (32 changed lines)
  2. pydash/services.py (34 changed lines)
  3. pydash/static/css/dashboard.css (14 changed lines)
  4. pydash/static/js/Chart.min.js (50 changed lines)
  5. pydash/templates/main.html (4 changed lines)
  6. pydash/views.py (61 changed lines)
  7. requirements.txt (2 changed lines)
  8. setup.py (12 changed lines)

README.rst (32 changed lines)

@ -1,23 +1,20 @@
pyDash - v1.4.4
pyDash - v1.4.6
===============
A reusable Django app for monitoring your Linux server.
Supported Python versions:
- Python 2.x
Requirements:
- Django >= 1.5
::
Python 2.x
Python 3.x
`View Demo <http://pydash.hostechs.com/>`_
Requirements:
::
user: admin
pass: admin
Django >= 1.5
Installation
@ -112,13 +109,28 @@ specific data:
/info/platform/osname/ - OS Name
/info/platform/kernel/ - Kernel
/info/getcpus/cpucount/ - Number of CPU cores
/info/getcpu
/info/getcpus/cputype/ - Type/Name of CPU
/info/memory/ - Memory Usage
/info/cpuusage/ - CPU Usage in percentage(%), free and used
/info/getdisk/ - Disk Usage
/info/getusers/ - Online Users
/info/getips/ - IP Addresses
/info/gettraffic/ - Internet Traffic
/info/getdiskio/ - Disk Reads/Writes
/info/proc/ - Running Processes
/info/loadaverage/ - Load Average
/info/getnetstat/ - Netstat
To see the format of the JSON datasets returned, you can access any of the URLs from your browser
as http://youpydaship/url, e.g. http://demo.pydash.net/info/uptime/ .
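
Each of these endpoints returns a small JSON document, so they are easy to script against. Below is a minimal sketch using the requests library (not a pyDash dependency) against a placeholder host; the views sit behind @login_required, so an authenticated session is assumed::

    # Minimal sketch: poll a few of the /info/ endpoints and print their JSON.
    # BASE_URL is a placeholder for your own pyDash instance, and the session is
    # assumed to already carry a valid login cookie for the protected views.
    import requests

    BASE_URL = "http://youpydaship"  # replace with your deployment

    session = requests.Session()
    for path in ("/info/uptime/", "/info/loadaverage/", "/info/memory/"):
        payload = session.get(BASE_URL + path).json()
        print(path, payload)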
OS Support
==========
pyDash was tested and runs under the following OSes:
::
- Centos
- Fedora
- Ubuntu

pydash/services.py (34 changed lines)

@ -18,7 +18,7 @@ def get_uptime():
uptime_time = str(timedelta(seconds=uptime_seconds))
data = uptime_time.split('.', 1)[0]
except Exception, err:
except Exception as err:
data = str(err)
return data
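
The recurring change in this file swaps the Python-2-only comma form of exception binding for the "as" form, which both Python 2.6+ and Python 3 accept. A minimal sketch of the two spellings (safe_call and fetch_uptime are hypothetical stand-ins, not names from the diff):

    # Exception-binding syntax, before and after this change:
    #
    #     except Exception, err:     # Python 2 only; a SyntaxError on Python 3
    #     except Exception as err:   # accepted by Python 2.6+ and Python 3
    #
    def safe_call(fetch_uptime):
        try:
            return fetch_uptime()
        except Exception as err:   # the 'as' form works on both interpreter lines
            return str(err)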
@ -51,7 +51,7 @@ def get_ipaddress():
data = ips
except Exception, err:
except Exception as err:
data = str(err)
return data
@ -75,7 +75,7 @@ def get_cpus():
data = {'cpus': cpus, 'type': data}
except Exception, err:
except Exception as err:
data = str(err)
return data
@ -95,7 +95,7 @@ def get_users():
else:
data = [i.split(None, 3) for i in data]
except Exception, err:
except Exception as err:
data = str(err)
return data
@ -126,7 +126,7 @@ def get_traffic(request):
data = all_traffic
except Exception, err:
except Exception as err:
data = str(err)
return data
@ -145,7 +145,7 @@ def get_platform():
data = {'osname': osname, 'hostname': uname[1], 'kernel': uname[2]}
except Exception, err:
except Exception as err:
data = str(err)
return data
@ -163,7 +163,7 @@ def get_disk():
data = [i.split(None, 6) for i in data]
except Exception, err:
except Exception as err:
data = str(err)
return data
@ -199,7 +199,7 @@ def get_disk_rw():
data = rws
except Exception, err:
except Exception as err:
data = str(err)
return data
@ -211,21 +211,27 @@ def get_mem():
"""
try:
pipe = os.popen(
"free -tmo | " + "grep 'Mem' | " + "awk '{print $2,$4}'")
"free -tmo | " + "grep 'Mem' | " + "awk '{print $2,$4,$6,$7}'")
data = pipe.read().strip().split()
pipe.close()
allmem = int(data[0])
freemem = int(data[1])
buffers = int(data[2])
cachedmem = int(data[3])
# Memory in buffers + cached is actually available, so we count it
# as free. See http://www.linuxatemyram.com/ for details
freemem += buffers + cachedmem
percent = (100 - ((freemem * 100) / allmem))
usage = (allmem - freemem)
mem_usage = {'usage': usage, 'free': freemem, 'percent': percent}
mem_usage = {'usage': usage, 'buffers': buffers, 'cached': cachedmem, 'free': freemem, 'percent': percent}
data = mem_usage
except Exception, err:
except Exception as err:
data = str(err)
return data
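
The widened awk expression now pulls the total, free, buffers, and cached columns out of `free -tmo`, and buffers plus cached are folded into the free figure before the percentage is computed, following the reasoning quoted from linuxatemyram.com. A small self-contained sketch of that arithmetic with hard-coded sample values (the numbers are illustrative, not measured output):

    # Sketch of get_mem()'s arithmetic, with sample values in MB standing in for
    # the columns read from `free -tmo | grep 'Mem' | awk '{print $2,$4,$6,$7}'`.
    allmem, freemem, buffers, cachedmem = 7983, 512, 300, 2200  # illustrative only

    # Buffers + cached memory is reclaimable, so it is counted as free
    # (see http://www.linuxatemyram.com/ for the rationale cited in the diff).
    freemem += buffers + cachedmem
    percent = 100 - ((freemem * 100) / allmem)
    usage = allmem - freemem

    mem_usage = {'usage': usage, 'buffers': buffers, 'cached': cachedmem,
                 'free': freemem, 'percent': percent}
    # With these sample numbers: usage is 4971 MB and percent is roughly 62
    # (the exact figure depends on integer vs. float division across Python 2/3).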
@ -258,7 +264,7 @@ def get_cpu_usage():
data = cpu_used
except Exception, err:
except Exception as err:
data = str(err)
return data
@ -270,7 +276,7 @@ def get_load():
"""
try:
data = os.getloadavg()[0]
except Exception, err:
except Exception as err:
data = str(err)
return data
@ -289,7 +295,7 @@ def get_netstat():
data = [i.split(None, 4) for i in data]
except Exception, err:
except Exception as err:
data = str(err)
return data

pydash/static/css/dashboard.css (14 changed lines)

@ -397,7 +397,19 @@ h6.bigstats {
}
.memu {
margin: 0.5em;
border: 0 solid rgb(249, 134, 33);
border: 0 solid rgb(247, 70, 74);
border-left-width: 1em;
padding: 0 0.3em;
}
.memb {
margin: 0.5em;
border: 0 solid rgb(0, 154, 205);
border-left-width: 1em;
padding: 0 0.3em;
}
.memc {
margin: 0.5em;
border: 0 solid rgb(255, 185, 15);
border-left-width: 1em;
padding: 0 0.3em;
}

pydash/static/js/Chart.min.js (50 changed lines)
File diff suppressed because it is too large.

pydash/templates/main.html (4 changed lines)

@ -128,6 +128,8 @@
<div>
<span class="memf">{% trans "Free" %}</span>
<span class="memu">{% trans "Used" %}</span>
<span class="memb">{% trans "Buffers" %}</span>
<span class="memc">{% trans "Cached" %}</span>
</div>
</div>
<!-- /widget-content -->
@ -340,7 +342,7 @@ var mem_ctx = $("#memoryChart").get(0).getContext("2d");
$.getJSON(pydashUrls['memusage'], function(data) {
var options = {
animation : false,
pointDotRadius : 2,
pointDotRadius : 4,
scaleLabel : "<%=value%> Mb"
}
memChart.Line(data, options);

pydash/views.py (61 changed lines)

@ -47,7 +47,7 @@ def get_pydash_urls():
# E.g. reverse('platform', args=[0])[:-2] -> /info/platform/
# reverse('getcpus', args[0])[:-2] -> /info/getcpus/
results[name] = reverse(name, args=[0])[:-2]
return results
return json.dumps(results)
@login_required(login_url=reverse_lazy('login'))
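
get_pydash_urls() now returns a JSON string for the template instead of a plain dict, and the reverse(name, args=[0])[:-2] trick resolves each named route with a dummy argument and trims the trailing "0/" to leave the base /info/ URL. A rough sketch of the same idea (the url_names parameter is an illustrative simplification; the URLconf itself is not part of this diff):

    import json
    from django.core.urlresolvers import reverse  # import path used by Django 1.5-1.9

    def pydash_urls_as_json(url_names):
        """Map each named /info/ route to its base URL and serialize it for the template."""
        results = {}
        for name in url_names:
            # reverse('platform', args=[0]) -> '/info/platform/0/'; dropping the last
            # two characters leaves '/info/platform/' for the JavaScript to poll.
            results[name] = reverse(name, args=[0])[:-2]
        return json.dumps(results)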
@ -271,6 +271,8 @@ def memusage(request):
"""
datasets_free = []
datasets_used = []
datasets_buffers = []
datasets_cached = []
try:
mem_usage = services.get_mem()
@ -278,17 +280,21 @@ def memusage(request):
mem_usage = 0
try:
cookies = request._cookies['memory_usage']
cookies = request.COOKIES['memory_usage']
except Exception:
cookies = None
if not cookies:
datasets_free.append(0)
datasets_used.append(0)
datasets_buffers.append(0)
datasets_cached.append(0)
else:
datasets = json.loads(cookies)
datasets_free = datasets[0]
datasets_used = datasets[1]
datasets_buffers = datasets[2]
datasets_cached = datasets[3]
if len(datasets_free) > 10:
while datasets_free:
@ -300,6 +306,16 @@ def memusage(request):
del datasets_used[0]
if len(datasets_used) == 10:
break
if len(datasets_buffers) > 10:
while datasets_buffers:
del datasets_buffers[0]
if len(datasets_buffers) == 10:
break
if len(datasets_cached) > 10:
while datasets_cached:
del datasets_cached[0]
if len(datasets_cached) == 10:
break
if len(datasets_free) <= 9:
datasets_free.append(int(mem_usage['free']))
if len(datasets_free) == 10:
@ -310,7 +326,16 @@ def memusage(request):
if len(datasets_used) == 10:
datasets_used.append(int(mem_usage['usage']))
del datasets_used[0]
if len(datasets_buffers) <= 9:
datasets_buffers.append(int(mem_usage['buffers']))
if len(datasets_buffers) == 10:
datasets_buffers.append(int(mem_usage['buffers']))
del datasets_buffers[0]
if len(datasets_cached) <= 9:
datasets_cached.append(int(mem_usage['cached']))
if len(datasets_cached) == 10:
datasets_cached.append(int(mem_usage['cached']))
del datasets_cached[0]
# Some fix division by 0 Chart.js
if len(datasets_free) == 10:
if sum(datasets_free) == 0:
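
The bookkeeping above repeats one pattern per series: trim whatever came back from the cookie down to ten entries, then append the fresh reading and drop the oldest once the window is full. A compact sketch of that rolling-window behaviour (the helper and variable names are illustrative, not part of the diff):

    def push_sample(series, value, window=10):
        """Append a new reading and keep at most `window` samples, oldest first."""
        series.append(int(value))
        # Drop leading entries until the series fits the window, mirroring the
        # `del datasets_*[0]` loops in the view above.
        while len(series) > window:
            del series[0]
        return series

    # Sample reading standing in for services.get_mem() output (illustrative values).
    mem_usage = {'free': 3012, 'usage': 4971, 'buffers': 300, 'cached': 2200}

    datasets_free, datasets_used, datasets_buffers, datasets_cached = [0], [0], [0], [0]
    for series, key in ((datasets_free, 'free'), (datasets_used, 'usage'),
                        (datasets_buffers, 'buffers'), (datasets_cached, 'cached')):
        push_sample(series, mem_usage[key])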
@ -322,9 +347,9 @@ def memusage(request):
'labels': [""] * 10,
'datasets': [
{
"fillColor": "rgba(249,134,33,0.5)",
"strokeColor": "rgba(249,134,33,1)",
"pointColor": "rgba(249,134,33,1)",
"fillColor": "rgba(247,70,74,0.5)",
"strokeColor": "rgba(247,70,74,1)",
"pointColor": "rgba(247,70,74,1)",
"pointStrokeColor": "#fff",
"data": datasets_used
},
@ -334,11 +359,25 @@ def memusage(request):
"pointColor": "rgba(43,214,66,1)",
"pointStrokeColor": "#fff",
"data": datasets_free
}
},
{
"fillColor": "rgba(0,154,205,0.5)",
"strokeColor": "rgba(0,154,205,1)",
"pointColor": "rgba(0,154,205,1)",
"pointStrokeColor": "#fff",
"data": datasets_buffers
},
{
"fillColor": "rgba(255,185,15,0.5)",
"strokeColor": "rgba(255,185,15,1)",
"pointColor": "rgba(265,185,15,1)",
"pointStrokeColor": "#fff",
"data": datasets_cached
}
]
}
cookie_memory = [datasets_free, datasets_used]
cookie_memory = [datasets_free, datasets_used, datasets_buffers, datasets_cached]
data = json.dumps(memory)
response = HttpResponse()
response['Content-Type'] = "text/javascript"
@ -360,7 +399,7 @@ def loadaverage(request):
load_average = 0
try:
cookies = request._cookies['load_average']
cookies = request.COOKIES['load_average']
except Exception:
cookies = None
@ -427,7 +466,7 @@ def gettraffic(request):
traffic = 0
try:
cookies = request._cookies['traffic']
cookies = request.COOKIES['traffic']
except Exception:
cookies = None
@ -549,7 +588,7 @@ def getdiskio(request):
diskrw = 0
try:
cookies = request._cookies['diskrw']
cookies = request.COOKIES['diskrw']
except Exception:
cookies = None
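
Every cookie read in this file moves from the private request._cookies attribute to Django's public request.COOKIES mapping, which is the documented interface. A small sketch of the read/write pair around the memory_usage cookie (the helper names are illustrative; HttpResponse and set_cookie are the standard Django APIs the view already relies on):

    import json
    from django.http import HttpResponse

    def read_saved_series(request, cookie_name='memory_usage'):
        """Return the previously stored sample lists, or None on the first request."""
        try:
            return json.loads(request.COOKIES[cookie_name])  # public API, not request._cookies
        except (KeyError, ValueError):
            return None

    def respond_with_series(chart_data, cookie_memory, cookie_name='memory_usage'):
        """Serialize the chart payload and persist the sample history in the cookie."""
        response = HttpResponse(json.dumps(chart_data), content_type="text/javascript")
        response.set_cookie(cookie_name, json.dumps(cookie_memory))
        return response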

requirements.txt (2 changed lines)

@ -1 +1 @@
django==1.6.1
django>=1.5

setup.py (12 changed lines)

@ -8,7 +8,7 @@ os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-pydash-app',
version='1.4.4',
version='1.4.6',
packages=['pydash'],
include_package_data=True,
license='MIT',
@ -18,8 +18,8 @@ setup(
author='Florian N.',
author_email='michaelneagu@gmail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment'
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
@ -27,6 +27,12 @@ setup(
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
