Finish the weekly report, improve data upload and import, and improve indicator processing.
jiangjiahe committed Nov 18, 2018
1 parent 5537e3a commit a22c088
Showing 59 changed files with 291 additions and 195 deletions.
1 change: 1 addition & 0 deletions DataEcharts/settings.py
@@ -116,6 +116,7 @@
# 'USER': 'root',
# 'PASSWORD': '123456',
# Test environment
# 'NAME': 'DjangoEcharts_test',
'NAME': 'DjangoEcharts',
'USER': 'root',
'PASSWORD': '123456',
31 changes: 5 additions & 26 deletions report/models.py
@@ -11,7 +11,7 @@ class MalfunctionData(models.Model):
receiptNumber = models.CharField(db_column='receiptNumber', primary_key=True, max_length=20)
receiptSerialNumber = models.CharField(db_column='receiptSerialNumber', max_length=20)
receiptStatus = models.CharField(db_column='receiptStatus', max_length=10, blank=True, null=True)
title = models.CharField(max_length=200)
title = models.CharField(max_length=500)
category = models.CharField(max_length=200, blank=True, null=True)
distributeTime = models.DateTimeField(db_column='distributeTime', db_index=True)
processTime = models.IntegerField(db_column='processTime', blank=True, null=True)
@@ -84,29 +84,7 @@ class StatisticsQuarterlyQuality(models.Model):

class Meta:
db_table = 'report_statistics_quarterly_quality'


class StatisticsQuarterlySpecificDealTime(models.Model):
city = models.CharField(max_length=10, blank=True, null=True)
beginDate = models.DateField()
endDate = models.DateField()
amount = models.FloatField(blank=True, null=True)
reason = models.CharField(max_length=20, blank=True, null=True)

class Meta:
db_table = 'report_statistics_quarterly_specific_deal_time'


class StatisticsQuarterlyReason(models.Model):
city = models.CharField(max_length=10, blank=True, null=True)
reason = models.CharField(max_length=20, blank=True, null=True)
result = models.IntegerField(blank=True, null=True)
beginDate = models.DateField()
endDate = models.DateField()

class Meta:
# managed = False
db_table = 'report_statistics_quarterly_reason'
unique_together = ('beginDate', 'endDate', 'city')


class StatisticsQuarterlySpecificDealtimeAmount(models.Model):
@@ -140,12 +118,13 @@ class StatisticsMonthlyQuality(models.Model):

class Meta:
# managed = False
unique_together = ('yearNum', 'monthNum', 'city')
db_table = 'report_statistics_monthly_quality'


class MalfunctionLongtime(models.Model):
receiptNumber = models.CharField(db_column='receiptNumber', primary_key=True, max_length=20)
title = models.CharField(max_length=200)
title = models.CharField(max_length=500)
category = models.CharField(max_length=200, blank=True, null=True)
city = models.CharField(db_column='city', max_length=50, blank=True, null=True, db_index=True)
processTime = models.IntegerField(db_column='processTime', blank=True, null=True)
@@ -157,7 +136,7 @@ class Meta:

class MalfunctionOnTrack(models.Model):
receiptNumber = models.CharField(db_column='receiptNumber', primary_key=True, max_length=20)
title = models.CharField(max_length=200)
title = models.CharField(max_length=500)
category = models.CharField(max_length=200, blank=True, null=True)
receiptStatus = models.CharField(db_column='receiptStatus', max_length=10, blank=True, null=True)
city = models.CharField(db_column='city', max_length=50, blank=True, null=True, db_index=True)
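The model changes above alter the schema: the title columns widen from max_length=200 to 500, StatisticsMonthlyQuality gains a unique_together constraint, and two quarterly models are dropped, so the report app needs a new migration. A minimal sketch of what makemigrations might generate for two of these changes; the dependency on the previous migration is a placeholder, not taken from this repository:

from django.db import migrations, models

class Migration(migrations.Migration):
    # Placeholder dependency; point this at the app's latest migration.
    dependencies = [('report', '0001_initial')]
    operations = [
        # Widen the title column on MalfunctionData (the same change applies to the other models).
        migrations.AlterField(
            model_name='malfunctiondata',
            name='title',
            field=models.CharField(max_length=500),
        ),
        # Enforce one row per (yearNum, monthNum, city) on the monthly quality table.
        migrations.AlterUniqueTogether(
            name='statisticsmonthlyquality',
            unique_together={('yearNum', 'monthNum', 'city')},
        ),
    ]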
94 changes: 61 additions & 33 deletions report/statistics_views.py
@@ -59,7 +59,6 @@ def get(self, request, begin_date, end_date):


class IntimeRateView(View):

def get(self, request, begin_date, end_date):
# print(begin_date, end_date)
st = datetime.datetime.now()
@@ -113,7 +112,7 @@ def get(self, request, begin_date, end_date):
st = datetime.datetime.now()
result_json = dict()
try:
qs = StatisticsQuarterlyQuality.objects.filter(beginDate=begin_date, endDate=end_date)
qs = StatisticsQuarterlyQuality.objects.filter(beginDate=begin_date, endDate=end_date, area__isnull=False)
if qs:
result_json = dict(result=[])
for item in qs:
@@ -129,19 +128,21 @@ def get(self, request, begin_date, end_date):
result_json = collect_deal_quality(5, 2018, 1, 1, 1, begin_date, end_date)
rs = result_json['result']
for r in rs:
StatisticsQuarterlyQuality(beginDate=begin_date, endDate=end_date,
area=r.get('area'),
city=r.get('city', ''),
IntimeRate=r.get('IntimeRate', ''),
SignRate=r.get('SignRate', ''),
Over48Rate=r.get('Over48Rate', ''),
AverageTime=r.get('AverageTime', '')).save()
result_json['process_time'] = str(datetime.datetime.now() - st)
return JsonResponse(data=result_json, safe=False)
StatisticsQuarterlyQuality.objects.get_or_create(beginDate=begin_date, endDate=end_date,
city=r.get('city', ''))
StatisticsQuarterlyQuality.objects.filter(beginDate=begin_date, endDate=end_date,
city=r.get('city', '')).update(area=r.get('area'),
IntimeRate=r.get('IntimeRate', ''),
SignRate=r.get('SignRate', ''),
Over48Rate=r.get('Over48Rate', ''),
AverageTime=r.get('AverageTime', ''))
result_json['process_time'] = str(datetime.datetime.now() - st)
return JsonResponse(data=result_json, safe=False)

except Exception as e:
result_json['status'] = 'fail'
result_json['msg'] = str(e)
return JsonResponse(data=result_json, safe=False)
return JsonResponse(data=result_json, safe=False)


class SpecificDealtimeAmountView(View):
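In DealQualityView above, the get_or_create call followed by filter(...).update(...) is an upsert keyed on (beginDate, endDate, city). A shorter sketch of the same idea using Django's update_or_create; this is an alternative formulation, not what the commit ships:

StatisticsQuarterlyQuality.objects.update_or_create(
    beginDate=begin_date, endDate=end_date, city=r.get('city', ''),
    defaults={
        'area': r.get('area'),
        'IntimeRate': r.get('IntimeRate', ''),
        'SignRate': r.get('SignRate', ''),
        'Over48Rate': r.get('Over48Rate', ''),
        'AverageTime': r.get('AverageTime', ''),
    },
)

update_or_create performs the lookup and the write in one call, using the same keyword arguments as the lookup in the committed code.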
@@ -214,7 +215,7 @@ def get(self, request, year, month):
rs_dict.update(rs)
for item in rs[profession]:
value_list = list(item.values())
StatisticsTop10Ne(index=value_list[0],
StatisticsTop10Ne(index=str(value_list[0]),
city=value_list[1],
ne=value_list[2],
amount=value_list[3],
@@ -318,30 +319,26 @@ def get(self, request, year, month):
class FileUploadView(APIView):
parser_classes = (MultiPartParser, FileUploadParser,)

# def post(self, requestfilename):
def post(self, request):
def post(self, request, filename):
# def post(self, request):

st = datetime.datetime.now()
result_json = dict()
try:
files = request.FILES.getlist('rate_data')
if files:
parse_indicators_xls(files[0].read())
result_json['msg'] = '指标文件上传完成'
files = request.FILES.getlist('mf_data')
if files:
parse_malfunction_data_xlsx(files[0])
# Upload the file in binary mode
file = request.FILES.get('file')
if filename == 'mf_data':
parse_malfunction_data_xlsx(file)
result_json['msg'] = '故障数据上传完成'
files = request.FILES.getlist('longtime_data')
if files:
parse_malfunction_longtime(files[0].read())
if filename == 'longtime_data':
parse_malfunction_longtime(file.read())
result_json['msg'] = '超72小时工单文件上传完成'
files = request.FILES.getlist('track_data')
if files:
parse_malfunction_track(files[0].read())
if filename == 'track_data':
parse_malfunction_track(file.read())
result_json['msg'] = '遗留跟踪单文件上传完成'
# Upload the file in binary mode
# file = request.FILES.get('file')
# print(len(file))
# print(filename)
# print(file.name)
# rs_list = parse_indicators_xls(file.read())

result_json['process_time'] = str(datetime.datetime.now() - st)
@@ -353,6 +350,31 @@ def post(self, request):
return JsonResponse(data=result_json, safe=False)


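With the route changed to upload/<filename>, FileUploadView now selects the parser by the URL segment (mf_data, longtime_data or track_data) and reads the payload from a single field named file. A client-side sketch using the requests library; the host, port and the report/ URL prefix are assumptions about a typical local setup, not values taken from this repository:

import requests

# Upload the weekly malfunction export; swap 'mf_data' for 'longtime_data' or 'track_data' as needed.
with open('malfunction.xlsx', 'rb') as f:
    resp = requests.post(
        'http://localhost:8000/report/upload/mf_data',  # assumed URL prefix
        files={'file': f},  # matches request.FILES.get('file') in the view
    )
print(resp.json())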
# Upload of the indicator .xls file
class IndicatorUploadView(APIView):
# parser_classes = (MultiPartParser, FileUploadParser,)

def post(self, request, year, num):
# def post(self, request):

st = datetime.datetime.now()
result_json = dict()
try:
files = request.FILES.getlist('rate_data')
if request.POST.get('type', '') == 'month':
parse_indicators_xls(files[0].read(), type='month', year=year, month=num)
elif request.POST.get('type', '') == 'quarter':
parse_indicators_xls(files[0].read(), type='quarter', year=year, quarter=num)
result_json['msg'] = '指标文件上传完成'
result_json['process_time'] = str(datetime.datetime.now() - st)
result_json['status'] = 'success'
return JsonResponse(data=result_json, safe=False)
except Exception as e:
result_json['status'] = 'fail'
result_json['msg'] = str(e)
return JsonResponse(data=result_json, safe=False)


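IndicatorUploadView takes the year and the month or quarter number from the URL, chooses the parse mode from a type form field, and expects the spreadsheet in a field named rate_data. A sketch of a monthly upload under the same assumed host and prefix as above:

import requests

with open('indicators.xls', 'rb') as f:
    resp = requests.post(
        'http://localhost:8000/report/indicator/2018/11',  # assumed prefix; 11 is the month
        data={'type': 'month'},            # 'quarter' switches to the quarterly branch
        files={'rate_data': f},            # matches request.FILES.getlist('rate_data')
    )
print(resp.json())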
class Worst10DepartmentView(APIView):

def get(self, request, year, month):
@@ -394,20 +416,26 @@ def get(self, request, year, month):

class CityRateView(APIView):

def get(self, request):
def get(self, request, year, month):
st = datetime.datetime.now()
result_json = dict()
try:
rs_list = []
qs = StatisticsMonthlyQuality.objects.all()
sorted_list = []
qs = StatisticsMonthlyQuality.objects.filter(yearNum=year, monthNum=month)
cities = City.objects.all()
for q in qs:
d = dict()
d['city'] = q.city
d['signRate'] = str(q.signRate)
d['autoRate'] = str(q.autoRate)
d['dealRate'] = str(q.dealRate)
rs_list.append(d)
result_json['result'] = rs_list
for city_obj in cities:
for d in rs_list:
if city_obj.city == d['city']:
sorted_list.append(d)
result_json['result'] = sorted_list
result_json['process_time'] = str(datetime.datetime.now() - st)
result_json['status'] = 'success'
return JsonResponse(data=result_json, safe=False)
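The new CityRateView body orders the per-city rates by walking the City table and scanning rs_list for each city, which is quadratic in the number of cities. A dictionary keyed by city name gives the same ordering in a single pass; a sketch that assumes city names are unique within rs_list:

# Index the collected rows by city, then emit them in City-table order.
by_city = {d['city']: d for d in rs_list}
sorted_list = [by_city[c.city] for c in cities if c.city in by_city]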
10 changes: 6 additions & 4 deletions report/urls.py
@@ -2,7 +2,7 @@

from report.statistics_views import OrderAmountView, IntimeRateView, DealtimeView, Over48RateView, DealQualityView, SpecificDealtimeAmountView, \
Top10NeView, SumAmountComparedView, DistrictReasonView, FileUploadView, Worst10DepartmentView, CityRateView, Top10NeExportView, \
WeeklyLongTimeView, WeeklyTrackView
WeeklyLongTimeView, WeeklyTrackView, IndicatorUploadView

from django.views.decorators.cache import cache_page

@@ -17,10 +17,12 @@
path('top10ne/<int:year>/<int:month>', Top10NeView.as_view()),
path('amountcompare/<int:year>/<int:month>', cache_page(60 * 60 * 24)(SumAmountComparedView.as_view())),
path('districtreason/<int:year>/<int:month>', cache_page(60 * 60 * 24)(DistrictReasonView.as_view())),
path('cityrate/', CityRateView.as_view()),
path('upload/', FileUploadView.as_view()),
path('cityrate/<year>/<month>', CityRateView.as_view()),
# path('upload/', FileUploadView.as_view()),
# Direct binary upload
# path('upload/<filename>', FileUploadView.as_view())
path('upload/<filename>', FileUploadView.as_view()),
# Indicator file upload
path('indicator/<year>/<num>', IndicatorUploadView.as_view()),
path('worst10department/<int:year>/<int:month>', cache_page(60 * 60 * 24)(Worst10DepartmentView.as_view())),
path('longtime/', WeeklyLongTimeView.as_view()),
path('track/<str:begin_date>/<str:end_date>/', WeeklyTrackView.as_view()),
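Note that the new cityrate and indicator routes use untyped path converters, so year, month and num reach the views as strings, while the neighbouring routes use <int:...>. If the views are happy receiving integers, the registrations could use the int converter as well; a sketch of that variant (a suggestion, not what this commit does):

path('cityrate/<int:year>/<int:month>', CityRateView.as_view()),
path('indicator/<int:year>/<int:num>', IndicatorUploadView.as_view()),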
69 changes: 34 additions & 35 deletions utils/data_collect.py
@@ -11,8 +11,7 @@

from django.db.models import Count, Avg, Sum

from report.models import MalfunctionData, City, DistrictCity, StatisticsQuarterlyAmount, District, StatisticsQuarterlyReason, \
StatisticsQuarterlySpecificDealTime, StatisticsMonthlyQuality
from report.models import MalfunctionData, City, DistrictCity, StatisticsQuarterlyAmount, District, StatisticsQuarterlyQuality


def collect_order_amount(statistics_type, year, quarter, month, day):
@@ -324,39 +323,39 @@ def get_district_over_48h_rate(statistics_type, year, quarter, month, day, begin
def collect_deal_quality(statistics_type, year=2018, quarter=1, month=1, day=1, begin_datetime="", end_datetime=""):
result_list = []
result = dict()
try:
intime_rate_result = collect_deal_in_time_rate(statistics_type, year, quarter, month, day, begin_datetime, end_datetime)
deal_time_result = collect_deal_time(statistics_type, year, quarter, month, day, begin_datetime, end_datetime)
ovre48_rate_result = collect_over_48h_rate(statistics_type, year, quarter, month, day, begin_datetime, end_datetime)
sign_rate_result = StatisticsMonthlyQuality.objects.all()
for i in range(1, 6):
cities = get_cities_by_district_id(i)
district = District.objects.get(id=i).district
for city in cities:
result_item = dict()
result_item['area'] = district
result_item['city'] = city
for intime_rate_item in intime_rate_result.get('result'):
if intime_rate_item.get("city") == city:
result_item['IntimeRate'] = intime_rate_item.get('IntimeRate')
for deal_time_item in deal_time_result.get('result'):
if deal_time_item.get('city') == city:
result_item['AverageTime'] = deal_time_item.get('AverageTime')
for ovre48_rate_item in ovre48_rate_result.get('result'):
if ovre48_rate_item.get('city') == city:
result_item['Over48Rate'] = ovre48_rate_item.get("Over48Rate")
for sign_rate_item in sign_rate_result:
if sign_rate_item.city == city:
result_item['SignRate'] = str(sign_rate_item.signRate)
result_list.append(result_item)

result['status'] = "success"
result['result'] = result_list
return result
except Exception as e:
result['status'] = "fail"
result['msg'] = str(e)
return result
# try:
intime_rate_result = collect_deal_in_time_rate(statistics_type, year, quarter, month, day, begin_datetime, end_datetime)
deal_time_result = collect_deal_time(statistics_type, year, quarter, month, day, begin_datetime, end_datetime)
ovre48_rate_result = collect_over_48h_rate(statistics_type, year, quarter, month, day, begin_datetime, end_datetime)
sign_rate_result = StatisticsQuarterlyQuality.objects.filter(beginDate=begin_datetime, endDate=end_datetime)
for i in range(1, 6):
cities = get_cities_by_district_id(i)
district = District.objects.get(id=i).district
for city in cities:
result_item = dict()
result_item['area'] = district
result_item['city'] = city
for intime_rate_item in intime_rate_result.get('result'):
if intime_rate_item.get("city") == city:
result_item['IntimeRate'] = intime_rate_item.get('IntimeRate')
for deal_time_item in deal_time_result.get('result'):
if deal_time_item.get('city') == city:
result_item['AverageTime'] = deal_time_item.get('AverageTime')
for ovre48_rate_item in ovre48_rate_result.get('result'):
if ovre48_rate_item.get('city') == city:
result_item['Over48Rate'] = ovre48_rate_item.get("Over48Rate")
for sign_rate_item in sign_rate_result:
if sign_rate_item.city == city:
result_item['SignRate'] = str(sign_rate_item.SignRate) if sign_rate_item.SignRate else ''
result_list.append(result_item)

result['status'] = "success"
result['result'] = result_list
return result
# except Exception as e:
result['status'] = "fail"
result['msg'] = str(e)
return result


def collect_specific_dealtime_amount(statistics_type, year=1, quarter=1, month=1, day=1, begin_datetime="", end_datetime=""):
12 changes: 8 additions & 4 deletions utils/data_collect_AN.py
@@ -103,9 +103,10 @@ def get_worst10_department(begin_datetime, end_datetime):
FROM
( SELECT dutyDepartment, COUNT( * ) co FROM report_malfunction_data WHERE isTimeOut = '否' AND distributeTime BETWEEN %s AND %s GROUP BY dutyDepartment ) T1,
( SELECT dutyDepartment, COUNT( * ) totalCo FROM report_malfunction_data WHERE distributeTime BETWEEN %s AND %s GROUP BY dutyDepartment ) T2
WHERE T1.dutyDepartment = T2.dutyDepartment
ORDER BY inTimeRate,inTimeAmount DESC """, [begin_datetime, end_datetime, begin_datetime, end_datetime])
rows = cursor.fetchall()[:10]
WHERE T1.dutyDepartment = T2.dutyDepartment
ORDER BY inTimeRate,inTimeAmount DESC """,
[begin_datetime, end_datetime, begin_datetime, end_datetime])
rows = cursor.fetchall()
result_list = []
for row in rows:
d = dict()
@@ -114,5 +115,8 @@ def get_worst10_department(begin_datetime, end_datetime):
d['intime_amount'] = str(row[2])
d['timeout_admount'] = str(row[3] - row[2])
d['intime_rate'] = str(row[1])
result_list.append(d)
if row[3] - row[2] >= 30:
result_list.append(d)
if len(result_list) == 10:
return result_list
return result_list
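The loop above now keeps a department only when it has at least 30 timed-out tickets (row[3] - row[2] >= 30) and stops once ten qualify, instead of simply slicing the first ten rows of the result set. A worked example of the filter, assuming as in the loop that row[2] is the in-time count and row[3] the total count for a department; the row values themselves are made up:

row = ('Example Department', 0.67, 80, 120)  # hypothetical cursor row
timeouts = row[3] - row[2]                   # 120 - 80 = 40 timed-out tickets
qualifies = timeouts >= 30                   # True, so this department is kept in result_list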

